layers: Apply clang-format to files in the layers directory
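
This is a mechanical reformat: the diff below only rewraps long lines, adjusts indentation and constructor-initializer layout, and normalizes cast/brace spacing, with no functional change intended.

The .clang-format settings used are not included in this diff, so the sketch below is only an illustrative guess that matches the observed output (LLVM base style, 4-space indent, 80-column limit); the actual repository configuration may differ. Such a pass would typically be applied in place with clang-format -i -style=file on each source file.

    # Illustrative .clang-format (assumed, not taken from this commit)
    BasedOnStyle: LLVM
    IndentWidth: 4
    ColumnLimit: 80
    UseTab: Never
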
diff --git a/layers/device_limits.cpp b/layers/device_limits.cpp
index 699abf4..9d8f033 100644
--- a/layers/device_limits.cpp
+++ b/layers/device_limits.cpp
@@ -64,16 +64,16 @@
// This struct will be stored in a map hashed by the dispatchable object
struct layer_data {
- debug_report_data *report_data;
- std::vector<VkDebugReportCallbackEXT> logging_callback;
- VkLayerDispatchTable *device_dispatch_table;
- VkLayerInstanceDispatchTable *instance_dispatch_table;
- devExts device_extensions;
+ debug_report_data *report_data;
+ std::vector<VkDebugReportCallbackEXT> logging_callback;
+ VkLayerDispatchTable *device_dispatch_table;
+ VkLayerInstanceDispatchTable *instance_dispatch_table;
+ devExts device_extensions;
// Track state of each instance
- unique_ptr<INSTANCE_STATE> instanceState;
- unique_ptr<PHYSICAL_DEVICE_STATE> physicalDeviceState;
- VkPhysicalDeviceFeatures actualPhysicalDeviceFeatures;
- VkPhysicalDeviceFeatures requestedPhysicalDeviceFeatures;
+ unique_ptr<INSTANCE_STATE> instanceState;
+ unique_ptr<PHYSICAL_DEVICE_STATE> physicalDeviceState;
+ VkPhysicalDeviceFeatures actualPhysicalDeviceFeatures;
+ VkPhysicalDeviceFeatures requestedPhysicalDeviceFeatures;
unordered_map<VkDevice, VkPhysicalDeviceProperties> physDevPropertyMap;
// Track physical device per logical device
@@ -81,33 +81,29 @@
// Vector indices correspond to queueFamilyIndex
vector<unique_ptr<VkQueueFamilyProperties>> queueFamilyProperties;
- layer_data() :
- report_data(nullptr),
- device_dispatch_table(nullptr),
- instance_dispatch_table(nullptr),
- device_extensions(),
- instanceState(nullptr),
- physicalDeviceState(nullptr),
- actualPhysicalDeviceFeatures(),
- requestedPhysicalDeviceFeatures(),
- physicalDevice()
- {};
+ layer_data()
+ : report_data(nullptr), device_dispatch_table(nullptr),
+ instance_dispatch_table(nullptr), device_extensions(),
+ instanceState(nullptr), physicalDeviceState(nullptr),
+ actualPhysicalDeviceFeatures(), requestedPhysicalDeviceFeatures(),
+ physicalDevice(){};
};
static unordered_map<void *, layer_data *> layer_data_map;
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
-// TODO : This can be much smarter, using separate locks for separate global data
+// TODO : This can be much smarter, using separate locks for separate global
+// data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;
-template layer_data *get_my_data_ptr<layer_data>(
- void *data_key,
- std::unordered_map<void *, layer_data *> &data_map);
+template layer_data *
+get_my_data_ptr<layer_data>(void *data_key,
+ std::unordered_map<void *, layer_data *> &data_map);
-static void init_device_limits(layer_data *my_data, const VkAllocationCallbacks *pAllocator)
-{
+static void init_device_limits(layer_data *my_data,
+ const VkAllocationCallbacks *pAllocator) {
uint32_t report_flags = 0;
uint32_t debug_action = 0;
FILE *log_output = NULL;
@@ -115,10 +111,9 @@
VkDebugReportCallbackEXT callback;
// initialize DeviceLimits options
report_flags = getLayerOptionFlags("DeviceLimitsReportFlags", 0);
- getLayerOptionEnum("DeviceLimitsDebugAction", (uint32_t *) &debug_action);
+ getLayerOptionEnum("DeviceLimitsDebugAction", (uint32_t *)&debug_action);
- if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
- {
+ if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
option_str = getLayerOption("DeviceLimitsLogFilename");
log_output = getLayerLogOutput(option_str, "DeviceLimits");
VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
@@ -126,8 +121,9 @@
dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
dbgCreateInfo.flags = report_flags;
dbgCreateInfo.pfnCallback = log_callback;
- dbgCreateInfo.pUserData = (void *) log_output;
- layer_create_msg_callback(my_data->report_data, &dbgCreateInfo, pAllocator, &callback);
+ dbgCreateInfo.pUserData = (void *)log_output;
+ layer_create_msg_callback(my_data->report_data, &dbgCreateInfo,
+ pAllocator, &callback);
my_data->logging_callback.push_back(callback);
}
@@ -138,12 +134,12 @@
dbgCreateInfo.flags = report_flags;
dbgCreateInfo.pfnCallback = win32_debug_output_msg;
dbgCreateInfo.pUserData = NULL;
- layer_create_msg_callback(my_data->report_data, &dbgCreateInfo, pAllocator, &callback);
+ layer_create_msg_callback(my_data->report_data, &dbgCreateInfo,
+ pAllocator, &callback);
my_data->logging_callback.push_back(callback);
}
- if (!globalLockInitialized)
- {
+ if (!globalLockInitialized) {
// TODO/TBD: Need to delete this mutex sometime. How??? One
// suggestion is to call this during vkCreateInstance(), and then we
// can clean it up during vkDestroyInstance(). However, that requires
@@ -155,45 +151,40 @@
}
static const VkExtensionProperties instance_extensions[] = {
- {
- VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
- VK_EXT_DEBUG_REPORT_SPEC_VERSION
- }
-};
+ {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
- const char *pLayerName,
- uint32_t *pCount,
- VkExtensionProperties* pProperties)
-{
- return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
+ return util_GetExtensionProperties(1, instance_extensions, pCount,
+ pProperties);
}
-static const VkLayerProperties dl_global_layers[] = {
- {
- "VK_LAYER_LUNARG_device_limits",
- VK_API_VERSION,
- VK_MAKE_VERSION(0, 1, 0),
- "Validation layer: Device Limits",
- }
-};
+static const VkLayerProperties dl_global_layers[] = {{
+ "VK_LAYER_LUNARG_device_limits", VK_API_VERSION, VK_MAKE_VERSION(0, 1, 0),
+ "Validation layer: Device Limits",
+}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
- uint32_t *pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+ VkLayerProperties *pProperties) {
return util_GetLayerProperties(ARRAY_SIZE(dl_global_layers),
- dl_global_layers,
- pCount, pProperties);
+ dl_global_layers, pCount, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkInstance *pInstance) {
+ VkLayerInstanceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkCreateInstance fpCreateInstance =
+ (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if (fpCreateInstance == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -205,15 +196,16 @@
if (result != VK_SUCCESS)
return result;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
my_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
- layer_init_instance_dispatch_table(*pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
+ layer_init_instance_dispatch_table(
+ *pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
my_data->report_data = debug_report_create_instance(
- my_data->instance_dispatch_table,
- *pInstance,
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ my_data->instance_dispatch_table, *pInstance,
+ pCreateInfo->enabledExtensionCount,
+ pCreateInfo->ppEnabledExtensionNames);
init_device_limits(my_data, pAllocator);
my_data->instanceState = unique_ptr<INSTANCE_STATE>(new INSTANCE_STATE());
@@ -222,8 +214,9 @@
}
/* hook DestroyInstance to remove tableInstanceMap entry */
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyInstance(VkInstance instance,
+ const VkAllocationCallbacks *pAllocator) {
dispatch_key key = get_dispatch_key(instance);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
@@ -246,235 +239,389 @@
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumeratePhysicalDevices(VkInstance instance,
+ uint32_t *pPhysicalDeviceCount,
+ VkPhysicalDevice *pPhysicalDevices) {
VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
if (my_data->instanceState) {
- // For this instance, flag when vkEnumeratePhysicalDevices goes to QUERY_COUNT and then QUERY_DETAILS
+ // For this instance, flag when vkEnumeratePhysicalDevices goes to
+ // QUERY_COUNT and then QUERY_DETAILS
if (NULL == pPhysicalDevices) {
- my_data->instanceState->vkEnumeratePhysicalDevicesState = QUERY_COUNT;
+ my_data->instanceState->vkEnumeratePhysicalDevicesState =
+ QUERY_COUNT;
} else {
- if (UNCALLED == my_data->instanceState->vkEnumeratePhysicalDevicesState) {
- // Flag error here, shouldn't be calling this without having queried count
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, 0, __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
- "Invalid call sequence to vkEnumeratePhysicalDevices() w/ non-NULL pPhysicalDevices. You should first call vkEnumeratePhysicalDevices() w/ NULL pPhysicalDevices to query pPhysicalDeviceCount.");
- } // TODO : Could also flag a warning if re-calling this function in QUERY_DETAILS state
- else if (my_data->instanceState->physicalDevicesCount != *pPhysicalDeviceCount) {
- // TODO: Having actual count match count from app is not a requirement, so this can be a warning
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_COUNT_MISMATCH, "DL",
- "Call to vkEnumeratePhysicalDevices() w/ pPhysicalDeviceCount value %u, but actual count supported by this instance is %u.", *pPhysicalDeviceCount, my_data->instanceState->physicalDevicesCount);
+ if (UNCALLED ==
+ my_data->instanceState->vkEnumeratePhysicalDevicesState) {
+ // Flag error here, shouldn't be calling this without having
+ // queried count
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, 0, __LINE__,
+ DEVLIMITS_MUST_QUERY_COUNT, "DL",
+ "Invalid call sequence to vkEnumeratePhysicalDevices() w/ "
+ "non-NULL pPhysicalDevices. You should first call "
+ "vkEnumeratePhysicalDevices() w/ NULL pPhysicalDevices to "
+ "query pPhysicalDeviceCount.");
+ } // TODO : Could also flag a warning if re-calling this function in
+ // QUERY_DETAILS state
+ else if (my_data->instanceState->physicalDevicesCount !=
+ *pPhysicalDeviceCount) {
+ // TODO: Having actual count match count from app is not a
+ // requirement, so this can be a warning
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_COUNT_MISMATCH, "DL",
+ "Call to vkEnumeratePhysicalDevices() w/ "
+ "pPhysicalDeviceCount value %u, but actual count "
+ "supported by this instance is %u.",
+ *pPhysicalDeviceCount,
+ my_data->instanceState->physicalDevicesCount);
}
- my_data->instanceState->vkEnumeratePhysicalDevicesState = QUERY_DETAILS;
+ my_data->instanceState->vkEnumeratePhysicalDevicesState =
+ QUERY_DETAILS;
}
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = my_data->instance_dispatch_table->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+ VkResult result =
+ my_data->instance_dispatch_table->EnumeratePhysicalDevices(
+ instance, pPhysicalDeviceCount, pPhysicalDevices);
if (NULL == pPhysicalDevices) {
- my_data->instanceState->physicalDevicesCount = *pPhysicalDeviceCount;
+ my_data->instanceState->physicalDevicesCount =
+ *pPhysicalDeviceCount;
} else { // Save physical devices
- for (uint32_t i=0; i < *pPhysicalDeviceCount; i++) {
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(pPhysicalDevices[i]), layer_data_map);
- phy_dev_data->physicalDeviceState = unique_ptr<PHYSICAL_DEVICE_STATE>(new PHYSICAL_DEVICE_STATE());
+ for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
+ layer_data *phy_dev_data = get_my_data_ptr(
+ get_dispatch_key(pPhysicalDevices[i]), layer_data_map);
+ phy_dev_data->physicalDeviceState =
+ unique_ptr<PHYSICAL_DEVICE_STATE>(
+ new PHYSICAL_DEVICE_STATE());
// Init actual features for each physical device
- my_data->instance_dispatch_table->GetPhysicalDeviceFeatures(pPhysicalDevices[i], &(phy_dev_data->actualPhysicalDeviceFeatures));
+ my_data->instance_dispatch_table->GetPhysicalDeviceFeatures(
+ pPhysicalDevices[i],
+ &(phy_dev_data->actualPhysicalDeviceFeatures));
}
}
return result;
} else {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, 0, __LINE__, DEVLIMITS_INVALID_INSTANCE, "DL",
- "Invalid instance (%#" PRIxLEAST64 ") passed into vkEnumeratePhysicalDevices().", (uint64_t)instance);
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, 0, __LINE__,
+ DEVLIMITS_INVALID_INSTANCE, "DL",
+ "Invalid instance (%#" PRIxLEAST64
+ ") passed into vkEnumeratePhysicalDevices().",
+ (uint64_t)instance);
}
return VK_ERROR_VALIDATION_FAILED_EXT;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
-{
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceFeaturesState = QUERY_DETAILS;
- phy_dev_data->instance_dispatch_table->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures *pFeatures) {
+ layer_data *phy_dev_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceFeaturesState =
+ QUERY_DETAILS;
+ phy_dev_data->instance_dispatch_table->GetPhysicalDeviceFeatures(
+ physicalDevice, pFeatures);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties)
-{
- get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)->instance_dispatch_table->GetPhysicalDeviceFormatProperties(
- physicalDevice, format, pFormatProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,
+ VkFormat format,
+ VkFormatProperties *pFormatProperties) {
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)
+ ->instance_dispatch_table->GetPhysicalDeviceFormatProperties(
+ physicalDevice, format, pFormatProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
-{
- return get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)->instance_dispatch_table->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPhysicalDeviceImageFormatProperties(
+ VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+ VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+ VkImageFormatProperties *pImageFormatProperties) {
+ return get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)
+ ->instance_dispatch_table->GetPhysicalDeviceImageFormatProperties(
+ physicalDevice, format, type, tiling, usage, flags,
+ pImageFormatProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties)
-{
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- phy_dev_data->instance_dispatch_table->GetPhysicalDeviceProperties(physicalDevice, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceProperties *pProperties) {
+ layer_data *phy_dev_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ phy_dev_data->instance_dispatch_table->GetPhysicalDeviceProperties(
+ physicalDevice, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t* pCount, VkQueueFamilyProperties* pQueueFamilyProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceQueueFamilyProperties(
+ VkPhysicalDevice physicalDevice, uint32_t *pCount,
+ VkQueueFamilyProperties *pQueueFamilyProperties) {
VkBool32 skipCall = VK_FALSE;
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ layer_data *phy_dev_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
if (phy_dev_data->physicalDeviceState) {
if (NULL == pQueueFamilyProperties) {
- phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
+ phy_dev_data->physicalDeviceState
+ ->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_COUNT;
} else {
- // Verify that for each physical device, this function is called first with NULL pQueueFamilyProperties ptr in order to get count
- if (UNCALLED == phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceQueueFamilyPropertiesState) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
- "Invalid call sequence to vkGetPhysicalDeviceQueueFamilyProperties() w/ non-NULL pQueueFamilyProperties. You should first call vkGetPhysicalDeviceQueueFamilyProperties() w/ NULL pQueueFamilyProperties to query pCount.");
+ // Verify that for each physical device, this function is called
+ // first with NULL pQueueFamilyProperties ptr in order to get count
+ if (UNCALLED ==
+ phy_dev_data->physicalDeviceState
+ ->vkGetPhysicalDeviceQueueFamilyPropertiesState) {
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
+ "Invalid call sequence to "
+ "vkGetPhysicalDeviceQueueFamilyProperties() w/ non-NULL "
+ "pQueueFamilyProperties. You should first call "
+ "vkGetPhysicalDeviceQueueFamilyProperties() w/ NULL "
+ "pQueueFamilyProperties to query pCount.");
}
- // Then verify that pCount that is passed in on second call matches what was returned
- if (phy_dev_data->physicalDeviceState->queueFamilyPropertiesCount != *pCount) {
+ // Then verify that pCount that is passed in on second call matches
+ // what was returned
+ if (phy_dev_data->physicalDeviceState->queueFamilyPropertiesCount !=
+ *pCount) {
- // TODO: this is not a requirement of the Valid Usage section for vkGetPhysicalDeviceQueueFamilyProperties, so provide as warning
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_COUNT_MISMATCH, "DL",
- "Call to vkGetPhysicalDeviceQueueFamilyProperties() w/ pCount value %u, but actual count supported by this physicalDevice is %u.", *pCount, phy_dev_data->physicalDeviceState->queueFamilyPropertiesCount);
+ // TODO: this is not a requirement of the Valid Usage section
+ // for vkGetPhysicalDeviceQueueFamilyProperties, so provide as
+ // warning
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_COUNT_MISMATCH, "DL",
+ "Call to vkGetPhysicalDeviceQueueFamilyProperties() w/ "
+ "pCount value %u, but actual count supported by this "
+ "physicalDevice is %u.",
+ *pCount, phy_dev_data->physicalDeviceState
+ ->queueFamilyPropertiesCount);
}
- phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
+ phy_dev_data->physicalDeviceState
+ ->vkGetPhysicalDeviceQueueFamilyPropertiesState = QUERY_DETAILS;
}
if (skipCall)
return;
- phy_dev_data->instance_dispatch_table->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pCount, pQueueFamilyProperties);
+ phy_dev_data->instance_dispatch_table
+ ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pCount,
+ pQueueFamilyProperties);
if (NULL == pQueueFamilyProperties) {
- phy_dev_data->physicalDeviceState->queueFamilyPropertiesCount = *pCount;
+ phy_dev_data->physicalDeviceState->queueFamilyPropertiesCount =
+ *pCount;
} else { // Save queue family properties
phy_dev_data->queueFamilyProperties.reserve(*pCount);
- for (uint32_t i=0; i < *pCount; i++) {
- phy_dev_data->queueFamilyProperties.emplace_back(new VkQueueFamilyProperties(pQueueFamilyProperties[i]));
+ for (uint32_t i = 0; i < *pCount; i++) {
+ phy_dev_data->queueFamilyProperties.emplace_back(
+ new VkQueueFamilyProperties(pQueueFamilyProperties[i]));
}
}
return;
} else {
- log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_PHYSICAL_DEVICE, "DL",
- "Invalid physicalDevice (%#" PRIxLEAST64 ") passed into vkGetPhysicalDeviceQueueFamilyProperties().", (uint64_t)physicalDevice);
+ log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__,
+ DEVLIMITS_INVALID_PHYSICAL_DEVICE, "DL",
+ "Invalid physicalDevice (%#" PRIxLEAST64
+ ") passed into vkGetPhysicalDeviceQueueFamilyProperties().",
+ (uint64_t)physicalDevice);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties)
-{
- get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)->instance_dispatch_table->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)
+ ->instance_dispatch_table->GetPhysicalDeviceMemoryProperties(
+ physicalDevice, pMemoryProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties)
-{
- get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)->instance_dispatch_table->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pNumProperties, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceSparseImageFormatProperties(
+ VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+ VkSampleCountFlagBits samples, VkImageUsageFlags usage,
+ VkImageTiling tiling, uint32_t *pNumProperties,
+ VkSparseImageFormatProperties *pProperties) {
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map)
+ ->instance_dispatch_table->GetPhysicalDeviceSparseImageFormatProperties(
+ physicalDevice, format, type, samples, usage, tiling,
+ pNumProperties, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+ uint32_t viewportCount, const VkViewport *pViewports) {
VkBool32 skipCall = VK_FALSE;
/* TODO: Verify viewportCount < maxViewports from VkPhysicalDeviceLimits */
if (VK_FALSE == skipCall) {
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- my_data->device_dispatch_table->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ my_data->device_dispatch_table->CmdSetViewport(
+ commandBuffer, firstViewport, viewportCount, pViewports);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
+ uint32_t scissorCount, const VkRect2D *pScissors) {
VkBool32 skipCall = VK_FALSE;
/* TODO: Verify scissorCount < maxViewports from VkPhysicalDeviceLimits */
/* TODO: viewportCount and scissorCount must match at draw time */
if (VK_FALSE == skipCall) {
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- my_data->device_dispatch_table->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ my_data->device_dispatch_table->CmdSetScissor(
+ commandBuffer, firstScissor, scissorCount, pScissors);
}
}
-static void createDeviceRegisterExtensions(const VkDeviceCreateInfo* pCreateInfo, VkDevice device)
-{
+static void
+createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo,
+ VkDevice device) {
uint32_t i;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
my_data->device_extensions.debug_marker_enabled = false;
for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], DEBUG_MARKER_EXTENSION_NAME) == 0) {
- /* Found a matching extension name, mark it enabled and init dispatch table*/
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ DEBUG_MARKER_EXTENSION_NAME) == 0) {
+ /* Found a matching extension name, mark it enabled and init
+ * dispatch table*/
initDebugMarkerTable(device);
my_data->device_extensions.debug_marker_enabled = true;
}
-
}
}
-// Verify that features have been queried and verify that requested features are available
-static VkBool32 validate_features_request(layer_data *phy_dev_data)
-{
+// Verify that features have been queried and verify that requested features are
+// available
+static VkBool32 validate_features_request(layer_data *phy_dev_data) {
VkBool32 skipCall = VK_FALSE;
// Verify that all of the requested features are available
- // Get ptrs into actual and requested structs and if requested is 1 but actual is 0, request is invalid
- VkBool32* actual = (VkBool32*)&(phy_dev_data->actualPhysicalDeviceFeatures);
- VkBool32* requested = (VkBool32*)&(phy_dev_data->requestedPhysicalDeviceFeatures);
- // TODO : This is a nice, compact way to loop through struct, but a bad way to report issues
- // Need to provide the struct member name with the issue. To do that seems like we'll
- // have to loop through each struct member which should be done w/ codegen to keep in synch.
+ // Get ptrs into actual and requested structs and if requested is 1 but
+ // actual is 0, request is invalid
+ VkBool32 *actual =
+ (VkBool32 *)&(phy_dev_data->actualPhysicalDeviceFeatures);
+ VkBool32 *requested =
+ (VkBool32 *)&(phy_dev_data->requestedPhysicalDeviceFeatures);
+ // TODO : This is a nice, compact way to loop through struct, but a bad way
+ // to report issues
+ // Need to provide the struct member name with the issue. To do that seems
+ // like we'll
+ // have to loop through each struct member which should be done w/ codegen
+ // to keep in synch.
uint32_t errors = 0;
- uint32_t totalBools = sizeof(VkPhysicalDeviceFeatures)/sizeof(VkBool32);
+ uint32_t totalBools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
for (uint32_t i = 0; i < totalBools; i++) {
if (requested[i] > actual[i]) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_FEATURE_REQUESTED, "DL",
- "While calling vkCreateDevice(), requesting feature #%u in VkPhysicalDeviceFeatures struct, which is not available on this device.", i);
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__,
+ DEVLIMITS_INVALID_FEATURE_REQUESTED, "DL",
+ "While calling vkCreateDevice(), requesting feature #%u in "
+ "VkPhysicalDeviceFeatures struct, which is not available on "
+ "this device.",
+ i);
errors++;
}
}
- if (errors && (UNCALLED == phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceFeaturesState)) {
+ if (errors &&
+ (UNCALLED ==
+ phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceFeaturesState)) {
// If user didn't request features, notify them that they should
- // TODO: Verify this against the spec. I believe this is an invalid use of the API and should return an error
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_FEATURE_REQUESTED, "DL",
- "You requested features that are unavailable on this device. You should first query feature availability by calling vkGetPhysicalDeviceFeatures().");
+ // TODO: Verify this against the spec. I believe this is an invalid use
+ // of the API and should return an error
+ skipCall |=
+ log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_FEATURE_REQUESTED, "DL",
+ "You requested features that are unavailable on this "
+ "device. You should first query feature availability by "
+ "calling vkGetPhysicalDeviceFeatures().");
}
return skipCall;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
VkBool32 skipCall = VK_FALSE;
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
+ layer_data *phy_dev_data =
+ get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
// First check is app has actually requested queueFamilyProperties
if (!phy_dev_data->physicalDeviceState) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
- "Invalid call to vkCreateDevice() w/o first calling vkEnumeratePhysicalDevices().");
- } else if (QUERY_DETAILS != phy_dev_data->physicalDeviceState->vkGetPhysicalDeviceQueueFamilyPropertiesState) {
- // TODO: This is not called out as an invalid use in the spec so make more informative recommendation.
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
- "Call to vkCreateDevice() w/o first calling vkGetPhysicalDeviceQueueFamilyProperties().");
+ skipCall |=
+ log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
+ "Invalid call to vkCreateDevice() w/o first calling "
+ "vkEnumeratePhysicalDevices().");
+ } else if (QUERY_DETAILS !=
+ phy_dev_data->physicalDeviceState
+ ->vkGetPhysicalDeviceQueueFamilyPropertiesState) {
+ // TODO: This is not called out as an invalid use in the spec so make
+ // more informative recommendation.
+ skipCall |=
+ log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
+ "Call to vkCreateDevice() w/o first calling "
+ "vkGetPhysicalDeviceQueueFamilyProperties().");
} else {
// Check that the requested queue properties are valid
- for (uint32_t i=0; i<pCreateInfo->queueCreateInfoCount; i++) {
- uint32_t requestedIndex = pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex;
- if (phy_dev_data->queueFamilyProperties.size() <= requestedIndex) { // requested index is out of bounds for this physical device
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
- "Invalid queue create request in vkCreateDevice(). Invalid queueFamilyIndex %u requested.", requestedIndex);
- } else if (pCreateInfo->pQueueCreateInfos[i].queueCount > phy_dev_data->queueFamilyProperties[requestedIndex]->queueCount) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
- "Invalid queue create request in vkCreateDevice(). QueueFamilyIndex %u only has %u queues, but requested queueCount is %u.", requestedIndex, phy_dev_data->queueFamilyProperties[requestedIndex]->queueCount, pCreateInfo->pQueueCreateInfos[i].queueCount);
+ for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
+ uint32_t requestedIndex =
+ pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex;
+ if (phy_dev_data->queueFamilyProperties.size() <=
+ requestedIndex) { // requested index is out of bounds for this
+ // physical device
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
+ "Invalid queue create request in vkCreateDevice(). Invalid "
+ "queueFamilyIndex %u requested.",
+ requestedIndex);
+ } else if (pCreateInfo->pQueueCreateInfos[i].queueCount >
+ phy_dev_data->queueFamilyProperties[requestedIndex]
+ ->queueCount) {
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
+ "Invalid queue create request in vkCreateDevice(). "
+ "QueueFamilyIndex %u only has %u queues, but requested "
+ "queueCount is %u.",
+ requestedIndex,
+ phy_dev_data->queueFamilyProperties[requestedIndex]
+ ->queueCount,
+ pCreateInfo->pQueueCreateInfos[i].queueCount);
}
}
}
// Check that any requested features are available
if (pCreateInfo->pEnabledFeatures) {
- phy_dev_data->requestedPhysicalDeviceFeatures = *(pCreateInfo->pEnabledFeatures);
+ phy_dev_data->requestedPhysicalDeviceFeatures =
+ *(pCreateInfo->pEnabledFeatures);
skipCall |= validate_features_request(phy_dev_data);
}
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if (fpCreateDevice == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -487,21 +634,26 @@
return result;
}
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
+ layer_data *my_instance_data =
+ get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
my_device_data->device_dispatch_table = new VkLayerDispatchTable;
- layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
- my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
+ layer_init_device_dispatch_table(
+ *pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
+ my_device_data->report_data = layer_debug_report_create_device(
+ my_instance_data->report_data, *pDevice);
my_device_data->physicalDevice = gpu;
createDeviceRegisterExtensions(pCreateInfo, *pDevice);
// Get physical device properties for this device
- phy_dev_data->instance_dispatch_table->GetPhysicalDeviceProperties(gpu, &(phy_dev_data->physDevPropertyMap[*pDevice]));
+ phy_dev_data->instance_dispatch_table->GetPhysicalDeviceProperties(
+ gpu, &(phy_dev_data->physDevPropertyMap[*pDevice]));
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
// Free device lifetime allocations
dispatch_key key = get_dispatch_key(device);
layer_data *my_device_data = get_my_data_ptr(key, layer_data_map);
@@ -511,248 +663,331 @@
layer_data_map.erase(key);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateCommandPool(VkDevice device,
+ const VkCommandPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkCommandPool *pCommandPool) {
// TODO : Verify that requested QueueFamilyIndex for this pool exists
- VkResult result = get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+ VkResult result = get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->CreateCommandPool(
+ device, pCreateInfo, pAllocator, pCommandPool);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyCommandPool(device, commandPool, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyCommandPool(device, commandPool,
+ pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
-{
- VkResult result = get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->ResetCommandPool(device, commandPool, flags);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetCommandPool(VkDevice device, VkCommandPool commandPool,
+ VkCommandPoolResetFlags flags) {
+ VkResult result = get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->ResetCommandPool(
+ device, commandPool, flags);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pCreateInfo, VkCommandBuffer* pCommandBuffer)
-{
- VkResult result = get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->AllocateCommandBuffers(device, pCreateInfo, pCommandBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateCommandBuffers(VkDevice device,
+ const VkCommandBufferAllocateInfo *pCreateInfo,
+ VkCommandBuffer *pCommandBuffer) {
+ VkResult result = get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->AllocateCommandBuffers(
+ device, pCreateInfo, pCommandBuffer);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t count, const VkCommandBuffer* pCommandBuffers)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->FreeCommandBuffers(device, commandPool, count, pCommandBuffers);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
+ uint32_t count,
+ const VkCommandBuffer *pCommandBuffers) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->FreeCommandBuffers(device, commandPool, count,
+ pCommandBuffers);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex,
+ uint32_t queueIndex, VkQueue *pQueue) {
VkBool32 skipCall = VK_FALSE;
- layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkPhysicalDevice gpu = dev_data->physicalDevice;
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
- if (queueFamilyIndex >= phy_dev_data->queueFamilyProperties.size()) { // requested index is out of bounds for this physical device
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
- "Invalid queueFamilyIndex %u requested in vkGetDeviceQueue().", queueFamilyIndex);
- } else if (queueIndex >= phy_dev_data->queueFamilyProperties[queueFamilyIndex]->queueCount) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
- "Invalid queue request in vkGetDeviceQueue(). QueueFamilyIndex %u only has %u queues, but requested queueIndex is %u.", queueFamilyIndex, phy_dev_data->queueFamilyProperties[queueFamilyIndex]->queueCount, queueIndex);
+ layer_data *phy_dev_data =
+ get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
+ if (queueFamilyIndex >=
+ phy_dev_data->queueFamilyProperties.size()) { // requested index is out
+ // of bounds for this
+ // physical device
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__,
+ DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
+ "Invalid queueFamilyIndex %u requested in vkGetDeviceQueue().",
+ queueFamilyIndex);
+ } else if (queueIndex >=
+ phy_dev_data->queueFamilyProperties[queueFamilyIndex]
+ ->queueCount) {
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__,
+ DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
+ "Invalid queue request in vkGetDeviceQueue(). QueueFamilyIndex %u "
+ "only has %u queues, but requested queueIndex is %u.",
+ queueFamilyIndex,
+ phy_dev_data->queueFamilyProperties[queueFamilyIndex]->queueCount,
+ queueIndex);
}
if (skipCall)
return;
- dev_data->device_dispatch_table->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+ dev_data->device_dispatch_table->GetDeviceQueue(device, queueFamilyIndex,
+ queueIndex, pQueue);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset)
-{
- layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
- VkBool32 skipCall = VK_FALSE;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+ VkBool32 skipCall = VK_FALSE;
- VkDeviceSize uniformAlignment = dev_data->physDevPropertyMap[device].limits.minUniformBufferOffsetAlignment;
+ VkDeviceSize uniformAlignment = dev_data->physDevPropertyMap[device]
+ .limits.minUniformBufferOffsetAlignment;
if (vk_safe_modulo(memoryOffset, uniformAlignment) != 0) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
- __LINE__, DEVLIMITS_INVALID_UNIFORM_BUFFER_OFFSET, "DL",
- "vkBindBufferMemory(): memoryOffset %#" PRIxLEAST64 " must be a multiple of device limit minUniformBufferOffsetAlignment %#" PRIxLEAST64,
- memoryOffset, uniformAlignment);
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_UNIFORM_BUFFER_OFFSET, "DL",
+ "vkBindBufferMemory(): memoryOffset %#" PRIxLEAST64
+ " must be a multiple of device limit "
+ "minUniformBufferOffsetAlignment %#" PRIxLEAST64,
+ memoryOffset, uniformAlignment);
}
if (VK_FALSE == skipCall) {
- result = dev_data->device_dispatch_table->BindBufferMemory(device, buffer, mem, memoryOffset);
+ result = dev_data->device_dispatch_table->BindBufferMemory(
+ device, buffer, mem, memoryOffset);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet *pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet *pDescriptorCopies)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
- VkBool32 skipCall = VK_FALSE;
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet *pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet *pDescriptorCopies) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+ VkBool32 skipCall = VK_FALSE;
for (uint32_t i = 0; i < descriptorWriteCount; i++) {
- if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
- (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)) {
- VkDeviceSize uniformAlignment = dev_data->physDevPropertyMap[device].limits.minUniformBufferOffsetAlignment;
- for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount; j++) {
- if (vk_safe_modulo(pDescriptorWrites[i].pBufferInfo[j].offset, uniformAlignment) != 0) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
- __LINE__, DEVLIMITS_INVALID_UNIFORM_BUFFER_OFFSET, "DL",
- "vkUpdateDescriptorSets(): pDescriptorWrites[%d].pBufferInfo[%d].offset (%#" PRIxLEAST64 ") must be a multiple of device limit minUniformBufferOffsetAlignment %#" PRIxLEAST64,
- i, j, pDescriptorWrites[i].pBufferInfo[j].offset, uniformAlignment);
+ if ((pDescriptorWrites[i].descriptorType ==
+ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
+ (pDescriptorWrites[i].descriptorType ==
+ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)) {
+ VkDeviceSize uniformAlignment =
+ dev_data->physDevPropertyMap[device]
+ .limits.minUniformBufferOffsetAlignment;
+ for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount;
+ j++) {
+ if (vk_safe_modulo(pDescriptorWrites[i].pBufferInfo[j].offset,
+ uniformAlignment) != 0) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_UNIFORM_BUFFER_OFFSET, "DL",
+ "vkUpdateDescriptorSets(): "
+ "pDescriptorWrites[%d].pBufferInfo[%d].offset "
+ "(%#" PRIxLEAST64
+ ") must be a multiple of device limit "
+ "minUniformBufferOffsetAlignment %#" PRIxLEAST64,
+ i, j, pDescriptorWrites[i].pBufferInfo[j].offset,
+ uniformAlignment);
}
}
- } else if ((pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
- (pDescriptorWrites[i].descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
- VkDeviceSize storageAlignment = dev_data->physDevPropertyMap[device].limits.minStorageBufferOffsetAlignment;
- for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount; j++) {
- if (vk_safe_modulo(pDescriptorWrites[i].pBufferInfo[j].offset, storageAlignment) != 0) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
- __LINE__, DEVLIMITS_INVALID_STORAGE_BUFFER_OFFSET, "DL",
- "vkUpdateDescriptorSets(): pDescriptorWrites[%d].pBufferInfo[%d].offset (%#" PRIxLEAST64 ") must be a multiple of device limit minStorageBufferOffsetAlignment %#" PRIxLEAST64,
- i, j, pDescriptorWrites[i].pBufferInfo[j].offset, storageAlignment);
+ } else if ((pDescriptorWrites[i].descriptorType ==
+ VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
+ (pDescriptorWrites[i].descriptorType ==
+ VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+ VkDeviceSize storageAlignment =
+ dev_data->physDevPropertyMap[device]
+ .limits.minStorageBufferOffsetAlignment;
+ for (uint32_t j = 0; j < pDescriptorWrites[i].descriptorCount;
+ j++) {
+ if (vk_safe_modulo(pDescriptorWrites[i].pBufferInfo[j].offset,
+ storageAlignment) != 0) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_STORAGE_BUFFER_OFFSET, "DL",
+ "vkUpdateDescriptorSets(): "
+ "pDescriptorWrites[%d].pBufferInfo[%d].offset "
+ "(%#" PRIxLEAST64
+ ") must be a multiple of device limit "
+ "minStorageBufferOffsetAlignment %#" PRIxLEAST64,
+ i, j, pDescriptorWrites[i].pBufferInfo[j].offset,
+ storageAlignment);
}
}
}
}
if (skipCall == VK_FALSE) {
- dev_data->device_dispatch_table->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+ dev_data->device_dispatch_table->UpdateDescriptorSets(
+ device, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const uint32_t* pData)
-{
- layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize dataSize,
+ const uint32_t *pData) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- // dstOffset is the byte offset into the buffer to start updating and must be a multiple of 4.
+ // dstOffset is the byte offset into the buffer to start updating and must
+ // be a multiple of 4.
if (dstOffset & 3) {
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
- "vkCmdUpdateBuffer parameter, VkDeviceSize dstOffset, is not a multiple of 4")) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
+ "vkCmdUpdateBuffer parameter, VkDeviceSize dstOffset, is "
+ "not a multiple of 4")) {
return;
}
}
// dataSize is the number of bytes to update, which must be a multiple of 4.
if (dataSize & 3) {
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
- "vkCmdUpdateBuffer parameter, VkDeviceSize dataSize, is not a multiple of 4")) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
+ "vkCmdUpdateBuffer parameter, VkDeviceSize dataSize, is "
+ "not a multiple of 4")) {
return;
}
}
- dev_data->device_dispatch_table->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+ dev_data->device_dispatch_table->CmdUpdateBuffer(
+ commandBuffer, dstBuffer, dstOffset, dataSize, pData);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data)
-{
- layer_data *dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- // dstOffset is the byte offset into the buffer to start filling and must be a multiple of 4.
+ // dstOffset is the byte offset into the buffer to start filling and must be
+ // a multiple of 4.
if (dstOffset & 3) {
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
- "vkCmdFillBuffer parameter, VkDeviceSize dstOffset, is not a multiple of 4")) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
+ "vkCmdFillBuffer parameter, VkDeviceSize dstOffset, is not "
+ "a multiple of 4")) {
return;
}
}
// size is the number of bytes to fill, which must be a multiple of 4.
if (size & 3) {
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
- "vkCmdFillBuffer parameter, VkDeviceSize size, is not a multiple of 4")) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "DL",
+ "vkCmdFillBuffer parameter, VkDeviceSize size, is not a "
+ "multiple of 4")) {
return;
}
}
- dev_data->device_dispatch_table->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+ dev_data->device_dispatch_table->CmdFillBuffer(commandBuffer, dstBuffer,
+ dstOffset, size, data);
}
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pMsgCallback)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- VkResult res = my_data->instance_dispatch_table->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
+ VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDebugReportCallbackEXT *pMsgCallback) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ VkResult res =
+ my_data->instance_dispatch_table->CreateDebugReportCallbackEXT(
+ instance, pCreateInfo, pAllocator, pMsgCallback);
if (VK_SUCCESS == res) {
- res = layer_create_msg_callback(my_data->report_data, pCreateInfo, pAllocator, pMsgCallback);
+ res = layer_create_msg_callback(my_data->report_data, pCreateInfo,
+ pAllocator, pMsgCallback);
}
return res;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT msgCallback,
- const VkAllocationCallbacks* pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDebugReportCallbackEXT(VkInstance instance,
+ VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(
+ instance, msgCallback, pAllocator);
layer_destroy_msg_callback(my_data->report_data, msgCallback, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objType,
- uint64_t object,
- size_t location,
- int32_t msgCode,
- const char* pLayerPrefix,
- const char* pMsg)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+ VkDebugReportObjectTypeEXT objType, uint64_t object,
+ size_t location, int32_t msgCode,
+ const char *pLayerPrefix, const char *pMsg) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table->DebugReportMessageEXT(
+ instance, flags, objType, object, location, msgCode, pLayerPrefix,
+ pMsg);
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkDestroyDevice"))
- return (PFN_vkVoidFunction) vkDestroyDevice;
+ return (PFN_vkVoidFunction)vkDestroyDevice;
if (!strcmp(funcName, "vkGetDeviceQueue"))
- return (PFN_vkVoidFunction) vkGetDeviceQueue;
+ return (PFN_vkVoidFunction)vkGetDeviceQueue;
if (!strcmp(funcName, "CreateCommandPool"))
- return (PFN_vkVoidFunction) vkCreateCommandPool;
+ return (PFN_vkVoidFunction)vkCreateCommandPool;
if (!strcmp(funcName, "DestroyCommandPool"))
- return (PFN_vkVoidFunction) vkDestroyCommandPool;
+ return (PFN_vkVoidFunction)vkDestroyCommandPool;
if (!strcmp(funcName, "ResetCommandPool"))
- return (PFN_vkVoidFunction) vkResetCommandPool;
+ return (PFN_vkVoidFunction)vkResetCommandPool;
if (!strcmp(funcName, "vkAllocateCommandBuffers"))
- return (PFN_vkVoidFunction) vkAllocateCommandBuffers;
+ return (PFN_vkVoidFunction)vkAllocateCommandBuffers;
if (!strcmp(funcName, "vkFreeCommandBuffers"))
- return (PFN_vkVoidFunction) vkFreeCommandBuffers;
+ return (PFN_vkVoidFunction)vkFreeCommandBuffers;
if (!strcmp(funcName, "vkCmdUpdateBuffer"))
- return (PFN_vkVoidFunction) vkCmdUpdateBuffer;
+ return (PFN_vkVoidFunction)vkCmdUpdateBuffer;
if (!strcmp(funcName, "vkBindBufferMemory"))
- return (PFN_vkVoidFunction) vkBindBufferMemory;
+ return (PFN_vkVoidFunction)vkBindBufferMemory;
if (!strcmp(funcName, "vkUpdateDescriptorSets"))
- return (PFN_vkVoidFunction) vkUpdateDescriptorSets;
+ return (PFN_vkVoidFunction)vkUpdateDescriptorSets;
if (!strcmp(funcName, "vkCmdFillBuffer"))
- return (PFN_vkVoidFunction) vkCmdFillBuffer;
+ return (PFN_vkVoidFunction)vkCmdFillBuffer;
if (dev == NULL)
return NULL;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(dev), layer_data_map);
- VkLayerDispatchTable* pTable = my_data->device_dispatch_table;
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(dev), layer_data_map);
+ VkLayerDispatchTable *pTable = my_data->device_dispatch_table;
{
if (pTable->GetDeviceProcAddr == NULL)
return NULL;
@@ -760,43 +995,45 @@
}
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
PFN_vkVoidFunction fptr;
layer_data *my_data;
if (!strcmp(funcName, "vkGetInstanceProcAddr"))
- return (PFN_vkVoidFunction) vkGetInstanceProcAddr;
+ return (PFN_vkVoidFunction)vkGetInstanceProcAddr;
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkCreateInstance"))
- return (PFN_vkVoidFunction) vkCreateInstance;
+ return (PFN_vkVoidFunction)vkCreateInstance;
if (!strcmp(funcName, "vkDestroyInstance"))
- return (PFN_vkVoidFunction) vkDestroyInstance;
+ return (PFN_vkVoidFunction)vkDestroyInstance;
if (!strcmp(funcName, "vkCreateDevice"))
- return (PFN_vkVoidFunction) vkCreateDevice;
+ return (PFN_vkVoidFunction)vkCreateDevice;
if (!strcmp(funcName, "vkEnumeratePhysicalDevices"))
- return (PFN_vkVoidFunction) vkEnumeratePhysicalDevices;
+ return (PFN_vkVoidFunction)vkEnumeratePhysicalDevices;
if (!strcmp(funcName, "vkGetPhysicalDeviceFeatures"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceFeatures;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceFeatures;
if (!strcmp(funcName, "vkGetPhysicalDeviceFormatProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceFormatProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceFormatProperties;
if (!strcmp(funcName, "vkGetPhysicalDeviceImageFormatProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceImageFormatProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceImageFormatProperties;
if (!strcmp(funcName, "vkGetPhysicalDeviceProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceProperties;
if (!strcmp(funcName, "vkGetPhysicalDeviceQueueFamilyProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceQueueFamilyProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceQueueFamilyProperties;
if (!strcmp(funcName, "vkGetPhysicalDeviceMemoryProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceMemoryProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceMemoryProperties;
if (!strcmp(funcName, "vkGetPhysicalDeviceSparseImageFormatProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceSparseImageFormatProperties;
+ return (
+ PFN_vkVoidFunction)vkGetPhysicalDeviceSparseImageFormatProperties;
if (!strcmp(funcName, "vkEnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceLayerProperties;
if (!strcmp(funcName, "vkEnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceExtensionProperties;
- if (!instance) return NULL;
+ if (!instance)
+ return NULL;
my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
@@ -805,7 +1042,7 @@
return fptr;
{
- VkLayerInstanceDispatchTable* pTable = my_data->instance_dispatch_table;
+ VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
if (pTable->GetInstanceProcAddr == NULL)
return NULL;
return pTable->GetInstanceProcAddr(instance, funcName);
diff --git a/layers/device_limits.h b/layers/device_limits.h
index ac6e23e..e35ef63 100644
--- a/layers/device_limits.h
+++ b/layers/device_limits.h
@@ -36,39 +36,45 @@
using namespace std;
// Device Limits ERROR codes
-typedef enum _DEV_LIMITS_ERROR
-{
- DEVLIMITS_NONE, // Used for INFO & other non-error messages
- DEVLIMITS_INVALID_INSTANCE, // Invalid instance used
- DEVLIMITS_INVALID_PHYSICAL_DEVICE, // Invalid physical device used
- DEVLIMITS_MUST_QUERY_COUNT, // Failed to make initial call to an API to query the count
- DEVLIMITS_MUST_QUERY_PROPERTIES, // Failed to make initial call to an API to query properties
- DEVLIMITS_INVALID_CALL_SEQUENCE, // Flag generic case of an invalid call sequence by the app
- DEVLIMITS_INVALID_FEATURE_REQUESTED, // App requested a feature not supported by physical device
- DEVLIMITS_COUNT_MISMATCH, // App requesting a count value different than actual value
- DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, // Invalid queue requested based on queue family properties
- DEVLIMITS_LIMITS_VIOLATION, // Driver-specified limits/properties were exceeded
- DEVLIMITS_INVALID_UNIFORM_BUFFER_OFFSET, // Uniform buffer offset violates device limit granularity
- DEVLIMITS_INVALID_STORAGE_BUFFER_OFFSET, // Storage buffer offset violates device limit granularity
+typedef enum _DEV_LIMITS_ERROR {
+ DEVLIMITS_NONE, // Used for INFO & other non-error messages
+ DEVLIMITS_INVALID_INSTANCE, // Invalid instance used
+ DEVLIMITS_INVALID_PHYSICAL_DEVICE, // Invalid physical device used
+ DEVLIMITS_MUST_QUERY_COUNT, // Failed to make initial call to an API to
+ // query the count
+ DEVLIMITS_MUST_QUERY_PROPERTIES, // Failed to make initial call to an API to
+ // query properties
+ DEVLIMITS_INVALID_CALL_SEQUENCE, // Flag generic case of an invalid call
+ // sequence by the app
+ DEVLIMITS_INVALID_FEATURE_REQUESTED, // App requested a feature not
+ // supported by physical device
+ DEVLIMITS_COUNT_MISMATCH, // App requesting a count value different than
+ // actual value
+ DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, // Invalid queue requested based on
+ // queue family properties
+ DEVLIMITS_LIMITS_VIOLATION, // Driver-specified limits/properties were
+ // exceeded
+ DEVLIMITS_INVALID_UNIFORM_BUFFER_OFFSET, // Uniform buffer offset violates
+ // device limit granularity
+ DEVLIMITS_INVALID_STORAGE_BUFFER_OFFSET, // Storage buffer offset violates
+ // device limit granularity
} DEV_LIMITS_ERROR;
-typedef enum _CALL_STATE
-{
- UNCALLED, // Function has not been called
- QUERY_COUNT, // Function called once to query a count
- QUERY_DETAILS, // Function called w/ a count to query details
+typedef enum _CALL_STATE {
+ UNCALLED, // Function has not been called
+ QUERY_COUNT, // Function called once to query a count
+ QUERY_DETAILS, // Function called w/ a count to query details
} CALL_STATE;
-typedef struct _INSTANCE_STATE
-{
+typedef struct _INSTANCE_STATE {
// Track the call state and array size for physical devices
CALL_STATE vkEnumeratePhysicalDevicesState;
uint32_t physicalDevicesCount;
- _INSTANCE_STATE():vkEnumeratePhysicalDevicesState(UNCALLED), physicalDevicesCount(0) {};
+ _INSTANCE_STATE()
+ : vkEnumeratePhysicalDevicesState(UNCALLED), physicalDevicesCount(0){};
} INSTANCE_STATE;
-typedef struct _PHYSICAL_DEVICE_STATE
-{
+typedef struct _PHYSICAL_DEVICE_STATE {
// Track the call state and array sizes for various query functions
CALL_STATE vkGetPhysicalDeviceQueueFamilyPropertiesState;
uint32_t queueFamilyPropertiesCount;
@@ -77,9 +83,11 @@
CALL_STATE vkGetPhysicalDeviceExtensionPropertiesState;
uint32_t deviceExtensionCount;
CALL_STATE vkGetPhysicalDeviceFeaturesState;
- _PHYSICAL_DEVICE_STATE():vkGetPhysicalDeviceQueueFamilyPropertiesState(UNCALLED), queueFamilyPropertiesCount(0),
- vkGetPhysicalDeviceLayerPropertiesState(UNCALLED), deviceLayerCount(0),
- vkGetPhysicalDeviceExtensionPropertiesState(UNCALLED), deviceExtensionCount(0),
- vkGetPhysicalDeviceFeaturesState(UNCALLED) {};
+ _PHYSICAL_DEVICE_STATE()
+ : vkGetPhysicalDeviceQueueFamilyPropertiesState(UNCALLED),
+ queueFamilyPropertiesCount(0),
+ vkGetPhysicalDeviceLayerPropertiesState(UNCALLED),
+ deviceLayerCount(0),
+ vkGetPhysicalDeviceExtensionPropertiesState(UNCALLED),
+ deviceExtensionCount(0), vkGetPhysicalDeviceFeaturesState(UNCALLED){};
} PHYSICAL_DEVICE_STATE;
-
diff --git a/layers/draw_state.cpp b/layers/draw_state.cpp
index 5434afc..e569b5d 100644
--- a/layers/draw_state.cpp
+++ b/layers/draw_state.cpp
@@ -71,7 +71,8 @@
#include "vk_layer_extension_utils.h"
#include "vk_layer_utils.h"
-// This definition controls whether image layout transitions are enabled/disabled.
+// This definition controls whether image layout transitions are
+// enabled/disabled.
// disable until corner cases are fixed
#define DISABLE_IMAGE_LAYOUT_VALIDATION
@@ -80,14 +81,15 @@
// Track command pools and their command buffers
struct CMD_POOL_INFO {
- VkCommandPoolCreateFlags createFlags;
- list<VkCommandBuffer> commandBuffers; // list container of cmd buffers allocated from this pool
+ VkCommandPoolCreateFlags createFlags;
+ list<VkCommandBuffer> commandBuffers; // list container of cmd buffers
+ // allocated from this pool
};
struct devExts {
VkBool32 debug_marker_enabled;
VkBool32 wsi_enabled;
- unordered_map<VkSwapchainKHR, SWAPCHAIN_NODE*> swapchainMap;
+ unordered_map<VkSwapchainKHR, SWAPCHAIN_NODE *> swapchainMap;
};
// fwd decls
@@ -95,112 +97,118 @@
struct render_pass;
struct layer_data {
- debug_report_data* report_data;
+ debug_report_data *report_data;
std::vector<VkDebugReportCallbackEXT> logging_callback;
- VkLayerDispatchTable* device_dispatch_table;
- VkLayerInstanceDispatchTable* instance_dispatch_table;
+ VkLayerDispatchTable *device_dispatch_table;
+ VkLayerInstanceDispatchTable *instance_dispatch_table;
devExts device_extensions;
vector<VkQueue> queues; // all queues under given device
// Global set of all cmdBuffers that are inFlight on this device
unordered_set<VkCommandBuffer> globalInFlightCmdBuffers;
// Layer specific data
- unordered_map<VkSampler, unique_ptr<SAMPLER_NODE>> sampleMap;
- unordered_map<VkImageView, unique_ptr<VkImageViewCreateInfo>> imageViewMap;
- unordered_map<VkImage, unique_ptr<VkImageCreateInfo>> imageMap;
- unordered_map<VkBufferView, unique_ptr<VkBufferViewCreateInfo>> bufferViewMap;
- unordered_map<VkBuffer, BUFFER_NODE> bufferMap;
- unordered_map<VkPipeline, PIPELINE_NODE*> pipelineMap;
- unordered_map<VkCommandPool, CMD_POOL_INFO> commandPoolMap;
- unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE*> descriptorPoolMap;
- unordered_map<VkDescriptorSet, SET_NODE*> setMap;
- unordered_map<VkDescriptorSetLayout, LAYOUT_NODE*> descriptorSetLayoutMap;
- unordered_map<VkPipelineLayout, PIPELINE_LAYOUT_NODE> pipelineLayoutMap;
- unordered_map<VkDeviceMemory, VkImage> memImageMap;
- unordered_map<VkFence, FENCE_NODE> fenceMap;
- unordered_map<VkQueue, QUEUE_NODE> queueMap;
- unordered_map<VkEvent, EVENT_NODE> eventMap;
- unordered_map<QueryObject, bool> queryToStateMap;
- unordered_map<VkSemaphore, uint32_t> semaphoreSignaledMap;
- unordered_map<void*, GLOBAL_CB_NODE*> commandBufferMap;
- unordered_map<VkFramebuffer, VkFramebufferCreateInfo*> frameBufferMap;
- unordered_map<VkImage, IMAGE_NODE*> imageLayoutMap;
- unordered_map<VkRenderPass, RENDER_PASS_NODE*> renderPassMap;
- unordered_map<VkShaderModule, shader_module*> shaderModuleMap;
+ unordered_map<VkSampler, unique_ptr<SAMPLER_NODE>> sampleMap;
+ unordered_map<VkImageView, unique_ptr<VkImageViewCreateInfo>> imageViewMap;
+ unordered_map<VkImage, unique_ptr<VkImageCreateInfo>> imageMap;
+ unordered_map<VkBufferView, unique_ptr<VkBufferViewCreateInfo>>
+ bufferViewMap;
+ unordered_map<VkBuffer, BUFFER_NODE> bufferMap;
+ unordered_map<VkPipeline, PIPELINE_NODE *> pipelineMap;
+ unordered_map<VkCommandPool, CMD_POOL_INFO> commandPoolMap;
+ unordered_map<VkDescriptorPool, DESCRIPTOR_POOL_NODE *> descriptorPoolMap;
+ unordered_map<VkDescriptorSet, SET_NODE *> setMap;
+ unordered_map<VkDescriptorSetLayout, LAYOUT_NODE *> descriptorSetLayoutMap;
+ unordered_map<VkPipelineLayout, PIPELINE_LAYOUT_NODE> pipelineLayoutMap;
+ unordered_map<VkDeviceMemory, VkImage> memImageMap;
+ unordered_map<VkFence, FENCE_NODE> fenceMap;
+ unordered_map<VkQueue, QUEUE_NODE> queueMap;
+ unordered_map<VkEvent, EVENT_NODE> eventMap;
+ unordered_map<QueryObject, bool> queryToStateMap;
+ unordered_map<VkSemaphore, uint32_t> semaphoreSignaledMap;
+ unordered_map<void *, GLOBAL_CB_NODE *> commandBufferMap;
+ unordered_map<VkFramebuffer, VkFramebufferCreateInfo *> frameBufferMap;
+ unordered_map<VkImage, IMAGE_NODE *> imageLayoutMap;
+ unordered_map<VkRenderPass, RENDER_PASS_NODE *> renderPassMap;
+ unordered_map<VkShaderModule, shader_module *> shaderModuleMap;
// Current render pass
- VkRenderPassBeginInfo renderPassBeginInfo;
- uint32_t currentSubpass;
- unordered_map<VkDevice, VkPhysicalDeviceProperties> physDevPropertyMap;
+ VkRenderPassBeginInfo renderPassBeginInfo;
+ uint32_t currentSubpass;
+ unordered_map<VkDevice, VkPhysicalDeviceProperties> physDevPropertyMap;
- layer_data() :
- report_data(nullptr),
- device_dispatch_table(nullptr),
- instance_dispatch_table(nullptr),
- device_extensions()
- {};
+ layer_data()
+ : report_data(nullptr), device_dispatch_table(nullptr),
+ instance_dispatch_table(nullptr), device_extensions(){};
};
// Code imported from ShaderChecker
-static void
-build_def_index(shader_module *);
+static void build_def_index(shader_module *);
-// A forward iterator over spirv instructions. Provides easy access to len, opcode, and content words
-// without the caller needing to care too much about the physical SPIRV module layout.
+// A forward iterator over spirv instructions. Provides easy access to len,
+// opcode, and content words
+// without the caller needing to care too much about the physical SPIRV module
+// layout.
struct spirv_inst_iter {
std::vector<uint32_t>::const_iterator zero;
std::vector<uint32_t>::const_iterator it;
uint32_t len() { return *it >> 16; }
uint32_t opcode() { return *it & 0x0ffffu; }
- uint32_t const & word(unsigned n) { return it[n]; }
+ uint32_t const &word(unsigned n) { return it[n]; }
uint32_t offset() { return (uint32_t)(it - zero); }
spirv_inst_iter(std::vector<uint32_t>::const_iterator zero,
- std::vector<uint32_t>::const_iterator it) : zero(zero), it(it) {}
+ std::vector<uint32_t>::const_iterator it)
+ : zero(zero), it(it) {}
- bool operator== (spirv_inst_iter const & other) {
- return it == other.it;
- }
+ bool operator==(spirv_inst_iter const &other) { return it == other.it; }
- bool operator!= (spirv_inst_iter const & other) {
- return it != other.it;
- }
+ bool operator!=(spirv_inst_iter const &other) { return it != other.it; }
- spirv_inst_iter operator++ (int) { /* x++ */
+ spirv_inst_iter operator++(int) { /* x++ */
spirv_inst_iter ii = *this;
it += len();
return ii;
}
- spirv_inst_iter operator++ () { /* ++x; */
+ spirv_inst_iter operator++() { /* ++x; */
it += len();
return *this;
}
/* The iterator and the value are the same thing. */
- spirv_inst_iter & operator* () { return *this; }
- spirv_inst_iter const & operator* () const { return *this; }
+ spirv_inst_iter &operator*() { return *this; }
+ spirv_inst_iter const &operator*() const { return *this; }
};
struct shader_module {
/* the spirv image itself */
vector<uint32_t> words;
- /* a mapping of <id> to the first word of its def. this is useful because walking type
- * trees, constant expressions, etc requires jumping all over the instruction stream.
+ /* a mapping of <id> to the first word of its def. this is useful because
+ * walking type
+ * trees, constant expressions, etc requires jumping all over the
+ * instruction stream.
*/
unordered_map<unsigned, unsigned> def_index;
- shader_module(VkShaderModuleCreateInfo const *pCreateInfo) :
- words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
- def_index() {
+ shader_module(VkShaderModuleCreateInfo const *pCreateInfo)
+ : words((uint32_t *)pCreateInfo->pCode,
+ (uint32_t *)pCreateInfo->pCode +
+ pCreateInfo->codeSize / sizeof(uint32_t)),
+ def_index() {
build_def_index(this);
}
/* expose begin() / end() to enable range-based for */
- spirv_inst_iter begin() const { return spirv_inst_iter(words.begin(), words.begin() + 5); } /* first insn */
- spirv_inst_iter end() const { return spirv_inst_iter(words.begin(), words.end()); } /* just past last insn */
+ spirv_inst_iter begin() const {
+ return spirv_inst_iter(words.begin(), words.begin() + 5);
+ } /* first insn */
+ spirv_inst_iter end() const {
+ return spirv_inst_iter(words.begin(), words.end());
+ } /* just past last insn */
/* given an offset into the module, produce an iterator there. */
- spirv_inst_iter at(unsigned offset) const { return spirv_inst_iter(words.begin(), words.begin() + offset); }
+ spirv_inst_iter at(unsigned offset) const {
+ return spirv_inst_iter(words.begin(), words.begin() + offset);
+ }
/* gets an iterator to the definition of an id */
spirv_inst_iter get_def(unsigned id) const {
@@ -213,19 +221,20 @@
};
// TODO : Do we need to guard access to layer_data_map w/ lock?
-static unordered_map<void*, layer_data*> layer_data_map;
+static unordered_map<void *, layer_data *> layer_data_map;
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
-// TODO : This can be much smarter, using separate locks for separate global data
+// TODO : This can be much smarter, using separate locks for separate global
+// data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;
#define MAX_TID 513
static loader_platform_thread_id g_tidMapping[MAX_TID] = {0};
static uint32_t g_maxTID = 0;
-template layer_data *get_my_data_ptr<layer_data>(
- void *data_key,
- std::unordered_map<void *, layer_data *> &data_map);
+template layer_data *
+get_my_data_ptr<layer_data>(void *data_key,
+ std::unordered_map<void *, layer_data *> &data_map);
// Map actual TID to an index value and return that index
// This keeps TIDs in range from 0-MAX_TID and simplifies compares between runs
@@ -236,120 +245,116 @@
return i;
}
// Don't yet have mapping, set it and return newly set index
- uint32_t retVal = (uint32_t) g_maxTID;
+ uint32_t retVal = (uint32_t)g_maxTID;
g_tidMapping[g_maxTID++] = tid;
assert(g_maxTID < MAX_TID);
return retVal;
}
// Return a string representation of CMD_TYPE enum
-static string cmdTypeToString(CMD_TYPE cmd)
-{
- switch (cmd)
- {
- case CMD_BINDPIPELINE:
- return "CMD_BINDPIPELINE";
- case CMD_BINDPIPELINEDELTA:
- return "CMD_BINDPIPELINEDELTA";
- case CMD_SETVIEWPORTSTATE:
- return "CMD_SETVIEWPORTSTATE";
- case CMD_SETLINEWIDTHSTATE:
- return "CMD_SETLINEWIDTHSTATE";
- case CMD_SETDEPTHBIASSTATE:
- return "CMD_SETDEPTHBIASSTATE";
- case CMD_SETBLENDSTATE:
- return "CMD_SETBLENDSTATE";
- case CMD_SETDEPTHBOUNDSSTATE:
- return "CMD_SETDEPTHBOUNDSSTATE";
- case CMD_SETSTENCILREADMASKSTATE:
- return "CMD_SETSTENCILREADMASKSTATE";
- case CMD_SETSTENCILWRITEMASKSTATE:
- return "CMD_SETSTENCILWRITEMASKSTATE";
- case CMD_SETSTENCILREFERENCESTATE:
- return "CMD_SETSTENCILREFERENCESTATE";
- case CMD_BINDDESCRIPTORSETS:
- return "CMD_BINDDESCRIPTORSETS";
- case CMD_BINDINDEXBUFFER:
- return "CMD_BINDINDEXBUFFER";
- case CMD_BINDVERTEXBUFFER:
- return "CMD_BINDVERTEXBUFFER";
- case CMD_DRAW:
- return "CMD_DRAW";
- case CMD_DRAWINDEXED:
- return "CMD_DRAWINDEXED";
- case CMD_DRAWINDIRECT:
- return "CMD_DRAWINDIRECT";
- case CMD_DRAWINDEXEDINDIRECT:
- return "CMD_DRAWINDEXEDINDIRECT";
- case CMD_DISPATCH:
- return "CMD_DISPATCH";
- case CMD_DISPATCHINDIRECT:
- return "CMD_DISPATCHINDIRECT";
- case CMD_COPYBUFFER:
- return "CMD_COPYBUFFER";
- case CMD_COPYIMAGE:
- return "CMD_COPYIMAGE";
- case CMD_BLITIMAGE:
- return "CMD_BLITIMAGE";
- case CMD_COPYBUFFERTOIMAGE:
- return "CMD_COPYBUFFERTOIMAGE";
- case CMD_COPYIMAGETOBUFFER:
- return "CMD_COPYIMAGETOBUFFER";
- case CMD_CLONEIMAGEDATA:
- return "CMD_CLONEIMAGEDATA";
- case CMD_UPDATEBUFFER:
- return "CMD_UPDATEBUFFER";
- case CMD_FILLBUFFER:
- return "CMD_FILLBUFFER";
- case CMD_CLEARCOLORIMAGE:
- return "CMD_CLEARCOLORIMAGE";
- case CMD_CLEARATTACHMENTS:
- return "CMD_CLEARCOLORATTACHMENT";
- case CMD_CLEARDEPTHSTENCILIMAGE:
- return "CMD_CLEARDEPTHSTENCILIMAGE";
- case CMD_RESOLVEIMAGE:
- return "CMD_RESOLVEIMAGE";
- case CMD_SETEVENT:
- return "CMD_SETEVENT";
- case CMD_RESETEVENT:
- return "CMD_RESETEVENT";
- case CMD_WAITEVENTS:
- return "CMD_WAITEVENTS";
- case CMD_PIPELINEBARRIER:
- return "CMD_PIPELINEBARRIER";
- case CMD_BEGINQUERY:
- return "CMD_BEGINQUERY";
- case CMD_ENDQUERY:
- return "CMD_ENDQUERY";
- case CMD_RESETQUERYPOOL:
- return "CMD_RESETQUERYPOOL";
- case CMD_COPYQUERYPOOLRESULTS:
- return "CMD_COPYQUERYPOOLRESULTS";
- case CMD_WRITETIMESTAMP:
- return "CMD_WRITETIMESTAMP";
- case CMD_INITATOMICCOUNTERS:
- return "CMD_INITATOMICCOUNTERS";
- case CMD_LOADATOMICCOUNTERS:
- return "CMD_LOADATOMICCOUNTERS";
- case CMD_SAVEATOMICCOUNTERS:
- return "CMD_SAVEATOMICCOUNTERS";
- case CMD_BEGINRENDERPASS:
- return "CMD_BEGINRENDERPASS";
- case CMD_ENDRENDERPASS:
- return "CMD_ENDRENDERPASS";
- case CMD_DBGMARKERBEGIN:
- return "CMD_DBGMARKERBEGIN";
- case CMD_DBGMARKEREND:
- return "CMD_DBGMARKEREND";
- default:
- return "UNKNOWN";
+static string cmdTypeToString(CMD_TYPE cmd) {
+ switch (cmd) {
+ case CMD_BINDPIPELINE:
+ return "CMD_BINDPIPELINE";
+ case CMD_BINDPIPELINEDELTA:
+ return "CMD_BINDPIPELINEDELTA";
+ case CMD_SETVIEWPORTSTATE:
+ return "CMD_SETVIEWPORTSTATE";
+ case CMD_SETLINEWIDTHSTATE:
+ return "CMD_SETLINEWIDTHSTATE";
+ case CMD_SETDEPTHBIASSTATE:
+ return "CMD_SETDEPTHBIASSTATE";
+ case CMD_SETBLENDSTATE:
+ return "CMD_SETBLENDSTATE";
+ case CMD_SETDEPTHBOUNDSSTATE:
+ return "CMD_SETDEPTHBOUNDSSTATE";
+ case CMD_SETSTENCILREADMASKSTATE:
+ return "CMD_SETSTENCILREADMASKSTATE";
+ case CMD_SETSTENCILWRITEMASKSTATE:
+ return "CMD_SETSTENCILWRITEMASKSTATE";
+ case CMD_SETSTENCILREFERENCESTATE:
+ return "CMD_SETSTENCILREFERENCESTATE";
+ case CMD_BINDDESCRIPTORSETS:
+ return "CMD_BINDDESCRIPTORSETS";
+ case CMD_BINDINDEXBUFFER:
+ return "CMD_BINDINDEXBUFFER";
+ case CMD_BINDVERTEXBUFFER:
+ return "CMD_BINDVERTEXBUFFER";
+ case CMD_DRAW:
+ return "CMD_DRAW";
+ case CMD_DRAWINDEXED:
+ return "CMD_DRAWINDEXED";
+ case CMD_DRAWINDIRECT:
+ return "CMD_DRAWINDIRECT";
+ case CMD_DRAWINDEXEDINDIRECT:
+ return "CMD_DRAWINDEXEDINDIRECT";
+ case CMD_DISPATCH:
+ return "CMD_DISPATCH";
+ case CMD_DISPATCHINDIRECT:
+ return "CMD_DISPATCHINDIRECT";
+ case CMD_COPYBUFFER:
+ return "CMD_COPYBUFFER";
+ case CMD_COPYIMAGE:
+ return "CMD_COPYIMAGE";
+ case CMD_BLITIMAGE:
+ return "CMD_BLITIMAGE";
+ case CMD_COPYBUFFERTOIMAGE:
+ return "CMD_COPYBUFFERTOIMAGE";
+ case CMD_COPYIMAGETOBUFFER:
+ return "CMD_COPYIMAGETOBUFFER";
+ case CMD_CLONEIMAGEDATA:
+ return "CMD_CLONEIMAGEDATA";
+ case CMD_UPDATEBUFFER:
+ return "CMD_UPDATEBUFFER";
+ case CMD_FILLBUFFER:
+ return "CMD_FILLBUFFER";
+ case CMD_CLEARCOLORIMAGE:
+ return "CMD_CLEARCOLORIMAGE";
+ case CMD_CLEARATTACHMENTS:
+ return "CMD_CLEARCOLORATTACHMENT";
+ case CMD_CLEARDEPTHSTENCILIMAGE:
+ return "CMD_CLEARDEPTHSTENCILIMAGE";
+ case CMD_RESOLVEIMAGE:
+ return "CMD_RESOLVEIMAGE";
+ case CMD_SETEVENT:
+ return "CMD_SETEVENT";
+ case CMD_RESETEVENT:
+ return "CMD_RESETEVENT";
+ case CMD_WAITEVENTS:
+ return "CMD_WAITEVENTS";
+ case CMD_PIPELINEBARRIER:
+ return "CMD_PIPELINEBARRIER";
+ case CMD_BEGINQUERY:
+ return "CMD_BEGINQUERY";
+ case CMD_ENDQUERY:
+ return "CMD_ENDQUERY";
+ case CMD_RESETQUERYPOOL:
+ return "CMD_RESETQUERYPOOL";
+ case CMD_COPYQUERYPOOLRESULTS:
+ return "CMD_COPYQUERYPOOLRESULTS";
+ case CMD_WRITETIMESTAMP:
+ return "CMD_WRITETIMESTAMP";
+ case CMD_INITATOMICCOUNTERS:
+ return "CMD_INITATOMICCOUNTERS";
+ case CMD_LOADATOMICCOUNTERS:
+ return "CMD_LOADATOMICCOUNTERS";
+ case CMD_SAVEATOMICCOUNTERS:
+ return "CMD_SAVEATOMICCOUNTERS";
+ case CMD_BEGINRENDERPASS:
+ return "CMD_BEGINRENDERPASS";
+ case CMD_ENDRENDERPASS:
+ return "CMD_ENDRENDERPASS";
+ case CMD_DBGMARKERBEGIN:
+ return "CMD_DBGMARKERBEGIN";
+ case CMD_DBGMARKEREND:
+ return "CMD_DBGMARKEREND";
+ default:
+ return "UNKNOWN";
}
}
// SPIRV utility functions
-static void
-build_def_index(shader_module *module)
-{
+static void build_def_index(shader_module *module) {
for (auto insn : *module) {
switch (insn.opcode()) {
/* Types */
@@ -402,44 +407,52 @@
}
}
-bool
-shader_is_spirv(VkShaderModuleCreateInfo const *pCreateInfo)
-{
+bool shader_is_spirv(VkShaderModuleCreateInfo const *pCreateInfo) {
uint32_t *words = (uint32_t *)pCreateInfo->pCode;
size_t sizeInWords = pCreateInfo->codeSize / sizeof(uint32_t);
/* Just validate that the header makes sense. */
- return sizeInWords >= 5 && words[0] == spv::MagicNumber && words[1] == spv::Version;
+ return sizeInWords >= 5 && words[0] == spv::MagicNumber &&
+ words[1] == spv::Version;
}
-static char const *
-storage_class_name(unsigned sc)
-{
+static char const *storage_class_name(unsigned sc) {
switch (sc) {
- case spv::StorageClassInput: return "input";
- case spv::StorageClassOutput: return "output";
- case spv::StorageClassUniformConstant: return "const uniform";
- case spv::StorageClassUniform: return "uniform";
- case spv::StorageClassWorkgroup: return "workgroup local";
- case spv::StorageClassCrossWorkgroup: return "workgroup global";
- case spv::StorageClassPrivate: return "private global";
- case spv::StorageClassFunction: return "function";
- case spv::StorageClassGeneric: return "generic";
- case spv::StorageClassAtomicCounter: return "atomic counter";
- case spv::StorageClassImage: return "image";
- default: return "unknown";
+ case spv::StorageClassInput:
+ return "input";
+ case spv::StorageClassOutput:
+ return "output";
+ case spv::StorageClassUniformConstant:
+ return "const uniform";
+ case spv::StorageClassUniform:
+ return "uniform";
+ case spv::StorageClassWorkgroup:
+ return "workgroup local";
+ case spv::StorageClassCrossWorkgroup:
+ return "workgroup global";
+ case spv::StorageClassPrivate:
+ return "private global";
+ case spv::StorageClassFunction:
+ return "function";
+ case spv::StorageClassGeneric:
+ return "generic";
+ case spv::StorageClassAtomicCounter:
+ return "atomic counter";
+ case spv::StorageClassImage:
+ return "image";
+ default:
+ return "unknown";
}
}
/* get the value of an integral constant */
-unsigned
-get_constant_value(shader_module const *src, unsigned id)
-{
+unsigned get_constant_value(shader_module const *src, unsigned id) {
auto value = src->get_def(id);
assert(value != src->end());
if (value.opcode() != spv::OpConstant) {
- /* TODO: Either ensure that the specialization transform is already performed on a module we're
+ /* TODO: Either ensure that the specialization transform is already
+ performed on a module we're
considering here, OR -- specialize on the fly now.
*/
return 1;
@@ -449,50 +462,48 @@
}
/* returns ptr to null terminator */
-static char *
-describe_type(char *dst, shader_module const *src, unsigned type)
-{
+static char *describe_type(char *dst, shader_module const *src, unsigned type) {
auto insn = src->get_def(type);
assert(insn != src->end());
switch (insn.opcode()) {
- case spv::OpTypeBool:
- return dst + sprintf(dst, "bool");
- case spv::OpTypeInt:
- return dst + sprintf(dst, "%cint%d", insn.word(3) ? 's' : 'u', insn.word(2));
- case spv::OpTypeFloat:
- return dst + sprintf(dst, "float%d", insn.word(2));
- case spv::OpTypeVector:
- dst += sprintf(dst, "vec%d of ", insn.word(3));
- return describe_type(dst, src, insn.word(2));
- case spv::OpTypeMatrix:
- dst += sprintf(dst, "mat%d of ", insn.word(3));
- return describe_type(dst, src, insn.word(2));
- case spv::OpTypeArray:
- dst += sprintf(dst, "arr[%d] of ", get_constant_value(src, insn.word(3)));
- return describe_type(dst, src, insn.word(2));
- case spv::OpTypePointer:
- dst += sprintf(dst, "ptr to %s ", storage_class_name(insn.word(2)));
- return describe_type(dst, src, insn.word(3));
- case spv::OpTypeStruct:
- {
- dst += sprintf(dst, "struct of (");
- for (unsigned i = 2; i < insn.len(); i++) {
- dst = describe_type(dst, src, insn.word(i));
- dst += sprintf(dst, i == insn.len()-1 ? ")" : ", ");
- }
- return dst;
- }
- case spv::OpTypeSampler:
- return dst + sprintf(dst, "sampler");
- default:
- return dst + sprintf(dst, "oddtype");
+ case spv::OpTypeBool:
+ return dst + sprintf(dst, "bool");
+ case spv::OpTypeInt:
+ return dst +
+ sprintf(dst, "%cint%d", insn.word(3) ? 's' : 'u', insn.word(2));
+ case spv::OpTypeFloat:
+ return dst + sprintf(dst, "float%d", insn.word(2));
+ case spv::OpTypeVector:
+ dst += sprintf(dst, "vec%d of ", insn.word(3));
+ return describe_type(dst, src, insn.word(2));
+ case spv::OpTypeMatrix:
+ dst += sprintf(dst, "mat%d of ", insn.word(3));
+ return describe_type(dst, src, insn.word(2));
+ case spv::OpTypeArray:
+ dst +=
+ sprintf(dst, "arr[%d] of ", get_constant_value(src, insn.word(3)));
+ return describe_type(dst, src, insn.word(2));
+ case spv::OpTypePointer:
+ dst += sprintf(dst, "ptr to %s ", storage_class_name(insn.word(2)));
+ return describe_type(dst, src, insn.word(3));
+ case spv::OpTypeStruct: {
+ dst += sprintf(dst, "struct of (");
+ for (unsigned i = 2; i < insn.len(); i++) {
+ dst = describe_type(dst, src, insn.word(i));
+ dst += sprintf(dst, i == insn.len() - 1 ? ")" : ", ");
+ }
+ return dst;
+ }
+ case spv::OpTypeSampler:
+ return dst + sprintf(dst, "sampler");
+ default:
+ return dst + sprintf(dst, "oddtype");
}
}
-static bool
-types_match(shader_module const *a, shader_module const *b, unsigned a_type, unsigned b_type, bool b_arrayed)
-{
+static bool types_match(shader_module const *a, shader_module const *b,
+ unsigned a_type, unsigned b_type, bool b_arrayed) {
/* walk two type trees together, and complain about differences */
auto a_insn = a->get_def(a_type);
auto b_insn = b->get_def(b_type);
@@ -500,7 +511,8 @@
assert(b_insn != b->end());
if (b_arrayed && b_insn.opcode() == spv::OpTypeArray) {
- /* we probably just found the extra level of arrayness in b_type: compare the type inside it to a_type */
+ /* we probably just found the extra level of arrayness in b_type:
+ * compare the type inside it to a_type */
return types_match(a, b, a_type, b_insn.word(2), false);
}
@@ -509,65 +521,71 @@
}
switch (a_insn.opcode()) {
- /* if b_arrayed and we hit a leaf type, then we can't match -- there's nowhere for the extra OpTypeArray to be! */
- case spv::OpTypeBool:
- return true && !b_arrayed;
- case spv::OpTypeInt:
- /* match on width, signedness */
- return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3) && !b_arrayed;
- case spv::OpTypeFloat:
- /* match on width */
- return a_insn.word(2) == b_insn.word(2) && !b_arrayed;
- case spv::OpTypeVector:
- case spv::OpTypeMatrix:
- /* match on element type, count. these all have the same layout. we don't get here if
- * b_arrayed -- that is handled above. */
- return !b_arrayed &&
- types_match(a, b, a_insn.word(2), b_insn.word(2), b_arrayed) &&
- a_insn.word(3) == b_insn.word(3);
- case spv::OpTypeArray:
- /* match on element type, count. these all have the same layout. we don't get here if
- * b_arrayed. This differs from vector & matrix types in that the array size is the id of a constant instruction,
- * not a literal within OpTypeArray */
- return !b_arrayed &&
- types_match(a, b, a_insn.word(2), b_insn.word(2), b_arrayed) &&
- get_constant_value(a, a_insn.word(3)) == get_constant_value(b, b_insn.word(3));
- case spv::OpTypeStruct:
- /* match on all element types */
- {
- if (b_arrayed) {
- /* for the purposes of matching different levels of arrayness, structs are leaves. */
+ /* if b_arrayed and we hit a leaf type, then we can't match -- there's
+ * nowhere for the extra OpTypeArray to be! */
+ case spv::OpTypeBool:
+ return true && !b_arrayed;
+ case spv::OpTypeInt:
+ /* match on width, signedness */
+ return a_insn.word(2) == b_insn.word(2) &&
+ a_insn.word(3) == b_insn.word(3) && !b_arrayed;
+ case spv::OpTypeFloat:
+ /* match on width */
+ return a_insn.word(2) == b_insn.word(2) && !b_arrayed;
+ case spv::OpTypeVector:
+ case spv::OpTypeMatrix:
+ /* match on element type, count. these all have the same layout. we
+ * don't get here if
+ * b_arrayed -- that is handled above. */
+ return !b_arrayed &&
+ types_match(a, b, a_insn.word(2), b_insn.word(2), b_arrayed) &&
+ a_insn.word(3) == b_insn.word(3);
+ case spv::OpTypeArray:
+ /* match on element type, count. these all have the same layout. we
+ * don't get here if
+ * b_arrayed. This differs from vector & matrix types in that the array
+ * size is the id of a constant instruction,
+ * not a literal within OpTypeArray */
+ return !b_arrayed &&
+ types_match(a, b, a_insn.word(2), b_insn.word(2), b_arrayed) &&
+ get_constant_value(a, a_insn.word(3)) ==
+ get_constant_value(b, b_insn.word(3));
+ case spv::OpTypeStruct:
+ /* match on all element types */
+ {
+ if (b_arrayed) {
+ /* for the purposes of matching different levels of arrayness,
+ * structs are leaves. */
+ return false;
+ }
+
+ if (a_insn.len() != b_insn.len()) {
+ return false; /* structs cannot match if member counts differ */
+ }
+
+ for (unsigned i = 2; i < a_insn.len(); i++) {
+ if (!types_match(a, b, a_insn.word(i), b_insn.word(i),
+ b_arrayed)) {
return false;
}
-
- if (a_insn.len() != b_insn.len()) {
- return false; /* structs cannot match if member counts differ */
- }
-
- for (unsigned i = 2; i < a_insn.len(); i++) {
- if (!types_match(a, b, a_insn.word(i), b_insn.word(i), b_arrayed)) {
- return false;
- }
- }
-
- return true;
}
- case spv::OpTypePointer:
- /* match on pointee type. storage class is expected to differ */
- return types_match(a, b, a_insn.word(3), b_insn.word(3), b_arrayed);
- default:
- /* remaining types are CLisms, or may not appear in the interfaces we
- * are interested in. Just claim no match.
- */
- return false;
+ return true;
+ }
+ case spv::OpTypePointer:
+ /* match on pointee type. storage class is expected to differ */
+ return types_match(a, b, a_insn.word(3), b_insn.word(3), b_arrayed);
+ default:
+ /* remaining types are CLisms, or may not appear in the interfaces we
+ * are interested in. Just claim no match.
+ */
+ return false;
}
}
-static int
-value_or_default(std::unordered_map<unsigned, unsigned> const &map, unsigned id, int def)
-{
+static int value_or_default(std::unordered_map<unsigned, unsigned> const &map,
+ unsigned id, int def) {
auto it = map.find(id);
if (it == map.end())
return def;
@@ -575,38 +593,39 @@
return it->second;
}
-
-static unsigned
-get_locations_consumed_by_type(shader_module const *src, unsigned type, bool strip_array_level)
-{
+static unsigned get_locations_consumed_by_type(shader_module const *src,
+ unsigned type,
+ bool strip_array_level) {
auto insn = src->get_def(type);
assert(insn != src->end());
switch (insn.opcode()) {
- case spv::OpTypePointer:
- /* see through the ptr -- this is only ever at the toplevel for graphics shaders;
- * we're never actually passing pointers around. */
- return get_locations_consumed_by_type(src, insn.word(3), strip_array_level);
- case spv::OpTypeArray:
- if (strip_array_level) {
- return get_locations_consumed_by_type(src, insn.word(2), false);
- }
- else {
- return get_constant_value(src, insn.word(3)) * get_locations_consumed_by_type(src, insn.word(2), false);
- }
- case spv::OpTypeMatrix:
- /* num locations is the dimension * element size */
- return insn.word(3) * get_locations_consumed_by_type(src, insn.word(2), false);
- default:
- /* everything else is just 1. */
- return 1;
+ case spv::OpTypePointer:
+ /* see through the ptr -- this is only ever at the toplevel for graphics
+ * shaders;
+ * we're never actually passing pointers around. */
+ return get_locations_consumed_by_type(src, insn.word(3),
+ strip_array_level);
+ case spv::OpTypeArray:
+ if (strip_array_level) {
+ return get_locations_consumed_by_type(src, insn.word(2), false);
+ } else {
+ return get_constant_value(src, insn.word(3)) *
+ get_locations_consumed_by_type(src, insn.word(2), false);
+ }
+ case spv::OpTypeMatrix:
+ /* num locations is the dimension * element size */
+ return insn.word(3) *
+ get_locations_consumed_by_type(src, insn.word(2), false);
+ default:
+ /* everything else is just 1. */
+ return 1;
/* TODO: extend to handle 64bit scalar types, whose vectors may need
* multiple locations. */
}
}
-
struct interface_var {
uint32_t id;
uint32_t type_id;
@@ -614,40 +633,32 @@
/* TODO: collect the name, too? Isn't required to be present. */
};
-
-static void
-collect_interface_block_members(layer_data *my_data, VkDevice dev,
- shader_module const *src,
- std::map<uint32_t, interface_var> &out,
- std::map<uint32_t, interface_var> &builtins_out,
- std::unordered_map<unsigned, unsigned> const &blocks,
- bool is_array_of_verts,
- uint32_t id,
- uint32_t type_id)
-{
- /* Walk down the type_id presented, trying to determine whether it's actually an interface block. */
+static void collect_interface_block_members(
+ layer_data *my_data, VkDevice dev, shader_module const *src,
+ std::map<uint32_t, interface_var> &out,
+ std::map<uint32_t, interface_var> &builtins_out,
+ std::unordered_map<unsigned, unsigned> const &blocks,
+ bool is_array_of_verts, uint32_t id, uint32_t type_id) {
+ /* Walk down the type_id presented, trying to determine whether it's
+ * actually an interface block. */
auto type = src->get_def(type_id);
while (true) {
if (type.opcode() == spv::OpTypePointer) {
type = src->get_def(type.word(3));
- }
- else if (type.opcode() == spv::OpTypeArray && is_array_of_verts) {
+ } else if (type.opcode() == spv::OpTypeArray && is_array_of_verts) {
type = src->get_def(type.word(2));
is_array_of_verts = false;
- }
- else if (type.opcode() == spv::OpTypeStruct) {
+ } else if (type.opcode() == spv::OpTypeStruct) {
if (blocks.find(type.word(1)) == blocks.end()) {
/* This isn't an interface block. */
return;
- }
- else {
+ } else {
/* We have found the correct type. Walk its members. */
break;
}
- }
- else {
+ } else {
/* not an interface block */
return;
}
@@ -655,14 +666,17 @@
/* Walk all the OpMemberDecorate for type's result id. */
for (auto insn : *src) {
- if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
+ if (insn.opcode() == spv::OpMemberDecorate &&
+ insn.word(1) == type.word(1)) {
unsigned member_index = insn.word(2);
unsigned member_type_id = type.word(2 + member_index);
if (insn.word(3) == spv::DecorationLocation) {
unsigned location = insn.word(4);
- unsigned num_locations = get_locations_consumed_by_type(src, member_type_id, false);
- for (unsigned int offset = 0; offset < num_locations; offset++) {
+ unsigned num_locations =
+ get_locations_consumed_by_type(src, member_type_id, false);
+ for (unsigned int offset = 0; offset < num_locations;
+ offset++) {
interface_var v;
v.id = id;
/* TODO: member index in interface_var too? */
@@ -670,8 +684,7 @@
v.offset = offset;
out[location + offset] = v;
}
- }
- else if (insn.word(3) == spv::DecorationBuiltIn) {
+ } else if (insn.word(3) == spv::DecorationBuiltIn) {
unsigned builtin = insn.word(4);
interface_var v;
v.id = id;
@@ -683,13 +696,10 @@
}
}
-static void
-collect_interface_by_location(layer_data *my_data, VkDevice dev,
- shader_module const *src, spv::StorageClass sinterface,
- std::map<uint32_t, interface_var> &out,
- std::map<uint32_t, interface_var> &builtins_out,
- bool is_array_of_verts)
-{
+static void collect_interface_by_location(
+ layer_data *my_data, VkDevice dev, shader_module const *src,
+ spv::StorageClass sinterface, std::map<uint32_t, interface_var> &out,
+ std::map<uint32_t, interface_var> &builtins_out, bool is_array_of_verts) {
std::unordered_map<unsigned, unsigned> var_locations;
std::unordered_map<unsigned, unsigned> var_builtins;
std::unordered_map<unsigned, unsigned> blocks;
@@ -717,65 +727,73 @@
/* TODO: handle index=1 dual source outputs from FS -- two vars will
* have the same location, and we DONT want to clobber. */
- else if (insn.opcode() == spv::OpVariable && insn.word(3) == sinterface) {
+ else if (insn.opcode() == spv::OpVariable &&
+ insn.word(3) == sinterface) {
unsigned id = insn.word(2);
unsigned type = insn.word(1);
int location = value_or_default(var_locations, id, -1);
int builtin = value_or_default(var_builtins, id, -1);
- /* All variables and interface block members in the Input or Output storage classes
+ /* All variables and interface block members in the Input or Output
+             * storage classes
* must be decorated with either a builtin or an explicit location.
*
- * TODO: integrate the interface block support here. For now, don't complain --
- * a valid SPIRV module will only hit this path for the interface block case, as the
- * individual members of the type are decorated, rather than variable declarations.
+ * TODO: integrate the interface block support here. For now, don't
+             * block case, as the
+ * a valid SPIRV module will only hit this path for the interface
+             * variable declarations.
+ * individual members of the type are decorated, rather than
+ *variable declarations.
*/
if (location != -1) {
- /* A user-defined interface variable, with a location. Where a variable
+ /* A user-defined interface variable, with a location. Where a
+ * variable
* occupied multiple locations, emit one result for each. */
- unsigned num_locations = get_locations_consumed_by_type(src, type,
- is_array_of_verts);
- for (unsigned int offset = 0; offset < num_locations; offset++) {
+ unsigned num_locations = get_locations_consumed_by_type(
+ src, type, is_array_of_verts);
+ for (unsigned int offset = 0; offset < num_locations;
+ offset++) {
interface_var v;
v.id = id;
v.type_id = type;
v.offset = offset;
out[location + offset] = v;
}
- }
- else if (builtin != -1) {
+ } else if (builtin != -1) {
/* A builtin interface variable */
- /* Note that since builtin interface variables do not consume numbered
- * locations, there is no larger-than-vec4 consideration as above
+ /* Note that since builtin interface variables do not consume
+ * numbered
+ * locations, there is no larger-than-vec4 consideration as
+ * above
*/
interface_var v;
v.id = id;
v.type_id = type;
v.offset = 0;
builtins_out[builtin] = v;
- }
- else {
+ } else {
/* An interface block instance */
- collect_interface_block_members(my_data, dev, src, out, builtins_out,
- blocks, is_array_of_verts, id, type);
+ collect_interface_block_members(my_data, dev, src, out,
+ builtins_out, blocks,
+ is_array_of_verts, id, type);
}
}
}
}
-static void
-collect_interface_by_descriptor_slot(layer_data *my_data, VkDevice dev,
- shader_module const *src, spv::StorageClass sinterface,
- std::map<std::pair<unsigned, unsigned>, interface_var> &out)
-{
+static void collect_interface_by_descriptor_slot(
+ layer_data *my_data, VkDevice dev, shader_module const *src,
+ spv::StorageClass sinterface,
+ std::map<std::pair<unsigned, unsigned>, interface_var> &out) {
std::unordered_map<unsigned, unsigned> var_sets;
std::unordered_map<unsigned, unsigned> var_bindings;
for (auto insn : *src) {
- /* All variables in the Uniform or UniformConstant storage classes are required to be decorated with both
+ /* All variables in the Uniform or UniformConstant storage classes are
+ * required to be decorated with both
* DecorationDescriptorSet and DecorationBinding.
*/
if (insn.opcode() == spv::OpDecorate) {
@@ -789,18 +807,21 @@
}
else if (insn.opcode() == spv::OpVariable &&
- (insn.word(3) == spv::StorageClassUniform ||
- insn.word(3) == spv::StorageClassUniformConstant)) {
+ (insn.word(3) == spv::StorageClassUniform ||
+ insn.word(3) == spv::StorageClassUniformConstant)) {
unsigned set = value_or_default(var_sets, insn.word(2), 0);
unsigned binding = value_or_default(var_bindings, insn.word(2), 0);
auto existing_it = out.find(std::make_pair(set, binding));
if (existing_it != out.end()) {
/* conflict within spv image */
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__,
- SHADER_CHECKER_INCONSISTENT_SPIRV, "SC",
- "var %d (type %d) in %s interface in descriptor slot (%u,%u) conflicts with existing definition",
- insn.word(2), insn.word(1), storage_class_name(sinterface),
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_INCONSISTENT_SPIRV, "SC",
+ "var %d (type %d) in %s interface in descriptor slot "
+ "(%u,%u) conflicts with existing definition",
+ insn.word(2), insn.word(1),
+ storage_class_name(sinterface),
existing_it->first.first, existing_it->first.second);
}
@@ -812,12 +833,12 @@
}
}
-static bool
-validate_interface_between_stages(layer_data *my_data, VkDevice dev,
- shader_module const *producer, char const *producer_name,
- shader_module const *consumer, char const *consumer_name,
- bool consumer_arrayed_input)
-{
+static bool validate_interface_between_stages(layer_data *my_data, VkDevice dev,
+ shader_module const *producer,
+ char const *producer_name,
+ shader_module const *consumer,
+ char const *consumer_name,
+ bool consumer_arrayed_input) {
std::map<uint32_t, interface_var> outputs;
std::map<uint32_t, interface_var> inputs;
@@ -826,47 +847,60 @@
bool pass = true;
- collect_interface_by_location(my_data, dev, producer, spv::StorageClassOutput, outputs, builtin_outputs, false);
- collect_interface_by_location(my_data, dev, consumer, spv::StorageClassInput, inputs, builtin_inputs,
- consumer_arrayed_input);
+ collect_interface_by_location(my_data, dev, producer,
+ spv::StorageClassOutput, outputs,
+ builtin_outputs, false);
+ collect_interface_by_location(my_data, dev, consumer,
+ spv::StorageClassInput, inputs,
+ builtin_inputs, consumer_arrayed_input);
auto a_it = outputs.begin();
auto b_it = inputs.begin();
/* maps sorted by key (location); walk them together to find mismatches */
- while ((outputs.size() > 0 && a_it != outputs.end()) || ( inputs.size() && b_it != inputs.end())) {
+ while ((outputs.size() > 0 && a_it != outputs.end()) ||
+ (inputs.size() && b_it != inputs.end())) {
bool a_at_end = outputs.size() == 0 || a_it == outputs.end();
- bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
+ bool b_at_end = inputs.size() == 0 || b_it == inputs.end();
auto a_first = a_at_end ? 0 : a_it->first;
auto b_first = b_at_end ? 0 : b_it->first;
if (b_at_end || ((!a_at_end) && (a_first < b_first))) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
- "%s writes to output location %d which is not consumed by %s", producer_name, a_first, consumer_name)) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
+ "%s writes to output location %d which is not consumed "
+ "by %s",
+ producer_name, a_first, consumer_name)) {
pass = false;
}
a_it++;
- }
- else if (a_at_end || a_first > b_first) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
- "%s consumes input location %d which is not written by %s", consumer_name, b_first, producer_name)) {
+ } else if (a_at_end || a_first > b_first) {
+ if (log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0, __LINE__,
+ SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
+ "%s consumes input location %d which is not written by %s",
+ consumer_name, b_first, producer_name)) {
pass = false;
}
b_it++;
- }
- else {
- if (types_match(producer, consumer, a_it->second.type_id, b_it->second.type_id, consumer_arrayed_input)) {
+ } else {
+ if (types_match(producer, consumer, a_it->second.type_id,
+ b_it->second.type_id, consumer_arrayed_input)) {
/* OK! */
- }
- else {
+ } else {
char producer_type[1024];
char consumer_type[1024];
describe_type(producer_type, producer, a_it->second.type_id);
describe_type(consumer_type, consumer, b_it->second.type_id);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
- "Type mismatch on location %d: '%s' vs '%s'", a_it->first, producer_type, consumer_type)) {
- pass = false;
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH,
+ "SC", "Type mismatch on location %d: '%s' vs '%s'",
+ a_it->first, producer_type, consumer_type)) {
+ pass = false;
}
}
a_it++;
@@ -879,13 +913,13 @@
enum FORMAT_TYPE {
FORMAT_TYPE_UNDEFINED,
- FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader */
+ FORMAT_TYPE_FLOAT, /* UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB --
+ anything we consider float in the shader */
FORMAT_TYPE_SINT,
FORMAT_TYPE_UINT,
};
-static unsigned
-get_format_type(VkFormat fmt) {
+static unsigned get_format_type(VkFormat fmt) {
switch (fmt) {
case VK_FORMAT_UNDEFINED:
return FORMAT_TYPE_UNDEFINED;
@@ -930,49 +964,52 @@
/* characterizes a SPIR-V type appearing in an interface to a FF stage,
* for comparison to a VkFormat's characterization above. */
-static unsigned
-get_fundamental_type(shader_module const *src, unsigned type)
-{
+static unsigned get_fundamental_type(shader_module const *src, unsigned type) {
auto insn = src->get_def(type);
assert(insn != src->end());
switch (insn.opcode()) {
- case spv::OpTypeInt:
- return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
- case spv::OpTypeFloat:
- return FORMAT_TYPE_FLOAT;
- case spv::OpTypeVector:
- return get_fundamental_type(src, insn.word(2));
- case spv::OpTypeMatrix:
- return get_fundamental_type(src, insn.word(2));
- case spv::OpTypeArray:
- return get_fundamental_type(src, insn.word(2));
- case spv::OpTypePointer:
- return get_fundamental_type(src, insn.word(3));
- default:
- return FORMAT_TYPE_UNDEFINED;
+ case spv::OpTypeInt:
+ return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
+ case spv::OpTypeFloat:
+ return FORMAT_TYPE_FLOAT;
+ case spv::OpTypeVector:
+ return get_fundamental_type(src, insn.word(2));
+ case spv::OpTypeMatrix:
+ return get_fundamental_type(src, insn.word(2));
+ case spv::OpTypeArray:
+ return get_fundamental_type(src, insn.word(2));
+ case spv::OpTypePointer:
+ return get_fundamental_type(src, insn.word(3));
+ default:
+ return FORMAT_TYPE_UNDEFINED;
}
}
static bool
-validate_vi_consistency(layer_data *my_data, VkDevice dev, VkPipelineVertexInputStateCreateInfo const *vi)
-{
- /* walk the binding descriptions, which describe the step rate and stride of each vertex buffer.
+validate_vi_consistency(layer_data *my_data, VkDevice dev,
+ VkPipelineVertexInputStateCreateInfo const *vi) {
+ /* walk the binding descriptions, which describe the step rate and stride of
+ * each vertex buffer.
* each binding should be specified only once.
*/
- std::unordered_map<uint32_t, VkVertexInputBindingDescription const *> bindings;
+ std::unordered_map<uint32_t, VkVertexInputBindingDescription const *>
+ bindings;
bool pass = true;
for (unsigned i = 0; i < vi->vertexBindingDescriptionCount; i++) {
auto desc = &vi->pVertexBindingDescriptions[i];
- auto & binding = bindings[desc->binding];
+ auto &binding = bindings[desc->binding];
if (binding) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_INCONSISTENT_VI, "SC",
- "Duplicate vertex input binding descriptions for binding %d", desc->binding)) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_INCONSISTENT_VI, "SC",
+ "Duplicate vertex input binding descriptions for "
+ "binding %d",
+ desc->binding)) {
pass = false;
}
- }
- else {
+ } else {
binding = desc;
}
}
@@ -981,57 +1018,73 @@
}
static bool
-validate_vi_against_vs_inputs(layer_data *my_data, VkDevice dev, VkPipelineVertexInputStateCreateInfo const *vi, shader_module const *vs)
-{
+validate_vi_against_vs_inputs(layer_data *my_data, VkDevice dev,
+ VkPipelineVertexInputStateCreateInfo const *vi,
+ shader_module const *vs) {
std::map<uint32_t, interface_var> inputs;
/* we collect builtin inputs, but they will never appear in the VI state --
- * the vs builtin inputs are generated in the pipeline, not sourced from buffers (VertexID, etc)
+ * the vs builtin inputs are generated in the pipeline, not sourced from
+ * buffers (VertexID, etc)
*/
std::map<uint32_t, interface_var> builtin_inputs;
bool pass = true;
- collect_interface_by_location(my_data, dev, vs, spv::StorageClassInput, inputs, builtin_inputs, false);
+ collect_interface_by_location(my_data, dev, vs, spv::StorageClassInput,
+ inputs, builtin_inputs, false);
/* Build index by location */
std::map<uint32_t, VkVertexInputAttributeDescription const *> attribs;
if (vi) {
for (unsigned i = 0; i < vi->vertexAttributeDescriptionCount; i++)
- attribs[vi->pVertexAttributeDescriptions[i].location] = &vi->pVertexAttributeDescriptions[i];
+ attribs[vi->pVertexAttributeDescriptions[i].location] =
+ &vi->pVertexAttributeDescriptions[i];
}
auto it_a = attribs.begin();
auto it_b = inputs.begin();
- while ((attribs.size() > 0 && it_a != attribs.end()) || (inputs.size() > 0 && it_b != inputs.end())) {
+ while ((attribs.size() > 0 && it_a != attribs.end()) ||
+ (inputs.size() > 0 && it_b != inputs.end())) {
bool a_at_end = attribs.size() == 0 || it_a == attribs.end();
- bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
+ bool b_at_end = inputs.size() == 0 || it_b == inputs.end();
auto a_first = a_at_end ? 0 : it_a->first;
auto b_first = b_at_end ? 0 : it_b->first;
if (!a_at_end && (b_at_end || a_first < b_first)) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
- "Vertex attribute at location %d not consumed by VS", a_first)) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
+ "Vertex attribute at location %d not consumed by VS",
+ a_first)) {
pass = false;
}
it_a++;
- }
- else if (!b_at_end && (a_at_end || b_first < a_first)) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
- "VS consumes input at location %d but not provided", b_first)) {
+ } else if (!b_at_end && (a_at_end || b_first < a_first)) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
+ "VS consumes input at location %d but not provided",
+ b_first)) {
pass = false;
}
it_b++;
- }
- else {
+ } else {
unsigned attrib_type = get_format_type(it_a->second->format);
- unsigned input_type = get_fundamental_type(vs, it_b->second.type_id);
+ unsigned input_type =
+ get_fundamental_type(vs, it_b->second.type_id);
/* type checking */
- if (attrib_type != FORMAT_TYPE_UNDEFINED && input_type != FORMAT_TYPE_UNDEFINED && attrib_type != input_type) {
+ if (attrib_type != FORMAT_TYPE_UNDEFINED &&
+ input_type != FORMAT_TYPE_UNDEFINED &&
+ attrib_type != input_type) {
char vs_type[1024];
describe_type(vs_type, vs, it_b->second.type_id);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
- "Attribute type of `%s` at location %d does not match VS input type of `%s`",
- string_VkFormat(it_a->second->format), a_first, vs_type)) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH,
+ "SC", "Attribute type of `%s` at location %d does "
+ "not match VS input type of `%s`",
+ string_VkFormat(it_a->second->format), a_first,
+ vs_type)) {
pass = false;
}
}
@@ -1045,51 +1098,70 @@
return pass;
}
-static bool
-validate_fs_outputs_against_render_pass(layer_data *my_data, VkDevice dev, shader_module const *fs, RENDER_PASS_NODE const *rp, uint32_t subpass)
-{
- const std::vector<VkFormat> &color_formats = rp->subpassColorFormats[subpass];
+static bool validate_fs_outputs_against_render_pass(layer_data *my_data,
+ VkDevice dev,
+ shader_module const *fs,
+ RENDER_PASS_NODE const *rp,
+ uint32_t subpass) {
+ const std::vector<VkFormat> &color_formats =
+ rp->subpassColorFormats[subpass];
std::map<uint32_t, interface_var> outputs;
std::map<uint32_t, interface_var> builtin_outputs;
bool pass = true;
/* TODO: dual source blend index (spv::DecIndex, zero if not provided) */
- collect_interface_by_location(my_data, dev, fs, spv::StorageClassOutput, outputs, builtin_outputs, false);
+ collect_interface_by_location(my_data, dev, fs, spv::StorageClassOutput,
+ outputs, builtin_outputs, false);
auto it = outputs.begin();
uint32_t attachment = 0;
- /* Walk attachment list and outputs together -- this is a little overpowered since attachments
- * are currently dense, but the parallel with matching between shader stages is nice.
+ /* Walk attachment list and outputs together -- this is a little overpowered
+ * since attachments
+ * are currently dense, but the parallel with matching between shader stages
+ * is nice.
*/
- while ((outputs.size() > 0 && it != outputs.end()) || attachment < color_formats.size()) {
- if (attachment == color_formats.size() || ( it != outputs.end() && it->first < attachment)) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
- "FS writes to output location %d with no matching attachment", it->first)) {
+ while ((outputs.size() > 0 && it != outputs.end()) ||
+ attachment < color_formats.size()) {
+ if (attachment == color_formats.size() ||
+ (it != outputs.end() && it->first < attachment)) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_OUTPUT_NOT_CONSUMED, "SC",
+ "FS writes to output location %d with no matching "
+ "attachment",
+ it->first)) {
pass = false;
}
it++;
- }
- else if (it == outputs.end() || it->first > attachment) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
- "Attachment %d not written by FS", attachment)) {
+ } else if (it == outputs.end() || it->first > attachment) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_INPUT_NOT_PRODUCED, "SC",
+ "Attachment %d not written by FS", attachment)) {
pass = false;
}
attachment++;
- }
- else {
+ } else {
unsigned output_type = get_fundamental_type(fs, it->second.type_id);
unsigned att_type = get_format_type(color_formats[attachment]);
/* type checking */
- if (att_type != FORMAT_TYPE_UNDEFINED && output_type != FORMAT_TYPE_UNDEFINED && att_type != output_type) {
+ if (att_type != FORMAT_TYPE_UNDEFINED &&
+ output_type != FORMAT_TYPE_UNDEFINED &&
+ att_type != output_type) {
char fs_type[1024];
describe_type(fs_type, fs, it->second.type_id);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, "SC",
- "Attachment %d of type `%s` does not match FS output type of `%s`",
- attachment, string_VkFormat(color_formats[attachment]), fs_type)) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_INTERFACE_TYPE_MISMATCH,
+ "SC", "Attachment %d of type `%s` does not match "
+ "FS output type of `%s`",
+ attachment,
+ string_VkFormat(color_formats[attachment]),
+ fs_type)) {
pass = false;
}
}
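For orientation, a hypothetical single-attachment subpass run through the checks above (formats and locations are made up for illustration):

// Subpass has one color attachment (attachment 0, VK_FORMAT_R32_UINT):
//  - an extra FS output at location 1 with no matching attachment -> SHADER_CHECKER_OUTPUT_NOT_CONSUMED (warning)
//  - attachment 0 never written by the FS                         -> SHADER_CHECKER_INPUT_NOT_PRODUCED (error)
//  - attachment 0 written from a float FS output                  -> SHADER_CHECKER_INTERFACE_TYPE_MISMATCH (uint vs float)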
@@ -1103,61 +1175,60 @@
return pass;
}
-
struct shader_stage_attributes {
- char const * const name;
+ char const *const name;
bool arrayed_input;
};
-
-static shader_stage_attributes
-shader_stage_attribs[] = {
- { "vertex shader", false },
- { "tessellation control shader", true },
- { "tessellation evaluation shader", false },
- { "geometry shader", true },
- { "fragment shader", false },
+static shader_stage_attributes shader_stage_attribs[] = {
+ {"vertex shader", false},
+ {"tessellation control shader", true},
+ {"tessellation evaluation shader", false},
+ {"geometry shader", true},
+ {"fragment shader", false},
};
// For given pipelineLayout verify that the setLayout at slot.first
// has the requested binding at slot.second
static bool
-has_descriptor_binding(layer_data* my_data,
- vector<VkDescriptorSetLayout>* pipelineLayout,
- std::pair<unsigned, unsigned> slot)
-{
+has_descriptor_binding(layer_data *my_data,
+ vector<VkDescriptorSetLayout> *pipelineLayout,
+ std::pair<unsigned, unsigned> slot) {
if (!pipelineLayout)
return false;
if (slot.first >= pipelineLayout->size())
return false;
- auto set = my_data->descriptorSetLayoutMap[(*pipelineLayout)[slot.first]]->bindings;
+ auto set = my_data->descriptorSetLayoutMap[(*pipelineLayout)[slot.first]]
+ ->bindings;
return (set.find(slot.second) != set.end());
}
-static uint32_t get_shader_stage_id(VkShaderStageFlagBits stage)
-{
+static uint32_t get_shader_stage_id(VkShaderStageFlagBits stage) {
uint32_t bit_pos = u_ffs(stage);
- return bit_pos-1;
+ return bit_pos - 1;
}
-// Block of code at start here for managing/tracking Pipeline state that this layer cares about
+// Block of code at start here for managing/tracking Pipeline state that this
+// layer cares about
static uint64_t g_drawCount[NUM_DRAW_TYPES] = {0, 0, 0, 0};
-// TODO : Should be tracking lastBound per commandBuffer and when draws occur, report based on that cmd buffer lastBound
-// Then need to synchronize the accesses based on cmd buffer so that if I'm reading state on one cmd buffer, updates
-// to that same cmd buffer by separate thread are not changing state from underneath us
+// TODO : Should be tracking lastBound per commandBuffer and when draws occur,
+// report based on that cmd buffer lastBound
+// Then need to synchronize the accesses based on cmd buffer so that if I'm
+// reading state on one cmd buffer, updates
+// to that same cmd buffer by separate thread are not changing state from
+// underneath us
// Track the last cmd buffer touched by this thread
// prototype
-static GLOBAL_CB_NODE* getCBNode(layer_data*, const VkCommandBuffer);
+static GLOBAL_CB_NODE *getCBNode(layer_data *, const VkCommandBuffer);
-static VkBool32 hasDrawCmd(GLOBAL_CB_NODE* pCB)
-{
- for (uint32_t i=0; i<NUM_DRAW_TYPES; i++) {
+static VkBool32 hasDrawCmd(GLOBAL_CB_NODE *pCB) {
+ for (uint32_t i = 0; i < NUM_DRAW_TYPES; i++) {
if (pCB->drawCount[i])
return VK_TRUE;
}
@@ -1165,23 +1236,32 @@
}
// Check object status for selected flag state
-static VkBool32 validate_status(layer_data* my_data, GLOBAL_CB_NODE* pNode, CBStatusFlags enable_mask, CBStatusFlags status_mask, CBStatusFlags status_flag, VkFlags msg_flags, DRAW_STATE_ERROR error_code, const char* fail_msg)
-{
- // If non-zero enable mask is present, check it against status but if enable_mask
+static VkBool32 validate_status(layer_data *my_data, GLOBAL_CB_NODE *pNode,
+ CBStatusFlags enable_mask,
+ CBStatusFlags status_mask,
+ CBStatusFlags status_flag, VkFlags msg_flags,
+ DRAW_STATE_ERROR error_code,
+ const char *fail_msg) {
+ // If non-zero enable mask is present, check it against status but if
+ // enable_mask
// is 0 then no enable required so we should always just check status
if ((!enable_mask) || (enable_mask & pNode->status)) {
if ((pNode->status & status_mask) != status_flag) {
- // TODO : How to pass dispatchable objects as srcObject? Here src obj should be cmd buffer
- return log_msg(my_data->report_data, msg_flags, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, error_code, "DS",
- "CB object %#" PRIxLEAST64 ": %s", (uint64_t)(pNode->commandBuffer), fail_msg);
+ // TODO : How to pass dispatchable objects as srcObject? Here src
+ // obj should be cmd buffer
+ return log_msg(my_data->report_data, msg_flags,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+ __LINE__, error_code, "DS",
+ "CB object %#" PRIxLEAST64 ": %s",
+ (uint64_t)(pNode->commandBuffer), fail_msg);
}
}
return VK_FALSE;
}
// Retrieve pipeline node ptr for given pipeline object
-static PIPELINE_NODE* getPipeline(layer_data* my_data, const VkPipeline pipeline)
-{
+static PIPELINE_NODE *getPipeline(layer_data *my_data,
+ const VkPipeline pipeline) {
loader_platform_thread_lock_mutex(&globalLock);
if (my_data->pipelineMap.find(pipeline) == my_data->pipelineMap.end()) {
loader_platform_thread_unlock_mutex(&globalLock);
@@ -1191,12 +1271,16 @@
return my_data->pipelineMap[pipeline];
}
-// Return VK_TRUE if for a given PSO, the given state enum is dynamic, else return VK_FALSE
-static VkBool32 isDynamic(const PIPELINE_NODE* pPipeline, const VkDynamicState state)
-{
+// Return VK_TRUE if for a given PSO, the given state enum is dynamic, else
+// return VK_FALSE
+static VkBool32 isDynamic(const PIPELINE_NODE *pPipeline,
+ const VkDynamicState state) {
if (pPipeline && pPipeline->graphicsPipelineCI.pDynamicState) {
- for (uint32_t i=0; i<pPipeline->graphicsPipelineCI.pDynamicState->dynamicStateCount; i++) {
- if (state == pPipeline->graphicsPipelineCI.pDynamicState->pDynamicStates[i])
+ for (uint32_t i = 0;
+ i < pPipeline->graphicsPipelineCI.pDynamicState->dynamicStateCount;
+ i++) {
+ if (state ==
+ pPipeline->graphicsPipelineCI.pDynamicState->pDynamicStates[i])
return VK_TRUE;
}
}
@@ -1204,54 +1288,110 @@
}
// Validate state stored as flags at time of draw call
-static VkBool32 validate_draw_state_flags(layer_data* my_data, GLOBAL_CB_NODE* pCB, VkBool32 indexedDraw) {
+static VkBool32 validate_draw_state_flags(layer_data *my_data,
+ GLOBAL_CB_NODE *pCB,
+ VkBool32 indexedDraw) {
VkBool32 result;
- result = validate_status(my_data, pCB, CBSTATUS_NONE, CBSTATUS_VIEWPORT_SET, CBSTATUS_VIEWPORT_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_VIEWPORT_NOT_BOUND, "Dynamic viewport state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_NONE, CBSTATUS_SCISSOR_SET, CBSTATUS_SCISSOR_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_SCISSOR_NOT_BOUND, "Dynamic scissor state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_NONE, CBSTATUS_LINE_WIDTH_SET, CBSTATUS_LINE_WIDTH_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_LINE_WIDTH_NOT_BOUND, "Dynamic line width state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_NONE, CBSTATUS_DEPTH_BIAS_SET, CBSTATUS_DEPTH_BIAS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_DEPTH_BIAS_NOT_BOUND, "Dynamic depth bias state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_COLOR_BLEND_WRITE_ENABLE, CBSTATUS_BLEND_SET, CBSTATUS_BLEND_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_BLEND_NOT_BOUND, "Dynamic blend object state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_DEPTH_WRITE_ENABLE, CBSTATUS_DEPTH_BOUNDS_SET, CBSTATUS_DEPTH_BOUNDS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_DEPTH_BOUNDS_NOT_BOUND, "Dynamic depth bounds state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_STENCIL_TEST_ENABLE, CBSTATUS_STENCIL_READ_MASK_SET, CBSTATUS_STENCIL_READ_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_STENCIL_NOT_BOUND, "Dynamic stencil read mask state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_STENCIL_TEST_ENABLE, CBSTATUS_STENCIL_WRITE_MASK_SET, CBSTATUS_STENCIL_WRITE_MASK_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_STENCIL_NOT_BOUND, "Dynamic stencil write mask state not set for this command buffer");
- result |= validate_status(my_data, pCB, CBSTATUS_STENCIL_TEST_ENABLE, CBSTATUS_STENCIL_REFERENCE_SET, CBSTATUS_STENCIL_REFERENCE_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_STENCIL_NOT_BOUND, "Dynamic stencil reference state not set for this command buffer");
+ result = validate_status(
+ my_data, pCB, CBSTATUS_NONE, CBSTATUS_VIEWPORT_SET,
+ CBSTATUS_VIEWPORT_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ DRAWSTATE_VIEWPORT_NOT_BOUND,
+ "Dynamic viewport state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_NONE, CBSTATUS_SCISSOR_SET, CBSTATUS_SCISSOR_SET,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_SCISSOR_NOT_BOUND,
+ "Dynamic scissor state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_NONE, CBSTATUS_LINE_WIDTH_SET,
+ CBSTATUS_LINE_WIDTH_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ DRAWSTATE_LINE_WIDTH_NOT_BOUND,
+ "Dynamic line width state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_NONE, CBSTATUS_DEPTH_BIAS_SET,
+ CBSTATUS_DEPTH_BIAS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ DRAWSTATE_DEPTH_BIAS_NOT_BOUND,
+ "Dynamic depth bias state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_COLOR_BLEND_WRITE_ENABLE, CBSTATUS_BLEND_SET,
+ CBSTATUS_BLEND_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ DRAWSTATE_BLEND_NOT_BOUND,
+ "Dynamic blend object state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_DEPTH_WRITE_ENABLE, CBSTATUS_DEPTH_BOUNDS_SET,
+ CBSTATUS_DEPTH_BOUNDS_SET, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ DRAWSTATE_DEPTH_BOUNDS_NOT_BOUND,
+ "Dynamic depth bounds state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_STENCIL_TEST_ENABLE,
+ CBSTATUS_STENCIL_READ_MASK_SET, CBSTATUS_STENCIL_READ_MASK_SET,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_STENCIL_NOT_BOUND,
+ "Dynamic stencil read mask state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_STENCIL_TEST_ENABLE,
+ CBSTATUS_STENCIL_WRITE_MASK_SET, CBSTATUS_STENCIL_WRITE_MASK_SET,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_STENCIL_NOT_BOUND,
+ "Dynamic stencil write mask state not set for this command buffer");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_STENCIL_TEST_ENABLE,
+ CBSTATUS_STENCIL_REFERENCE_SET, CBSTATUS_STENCIL_REFERENCE_SET,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_STENCIL_NOT_BOUND,
+ "Dynamic stencil reference state not set for this command buffer");
if (indexedDraw)
- result |= validate_status(my_data, pCB, CBSTATUS_NONE, CBSTATUS_INDEX_BUFFER_BOUND, CBSTATUS_INDEX_BUFFER_BOUND, VK_DEBUG_REPORT_ERROR_BIT_EXT, DRAWSTATE_INDEX_BUFFER_NOT_BOUND, "Index buffer object not bound to this command buffer when Indexed Draw attempted");
+ result |= validate_status(
+ my_data, pCB, CBSTATUS_NONE, CBSTATUS_INDEX_BUFFER_BOUND,
+ CBSTATUS_INDEX_BUFFER_BOUND, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ DRAWSTATE_INDEX_BUFFER_NOT_BOUND, "Index buffer object not bound "
+ "to this command buffer when "
+ "Indexed Draw attempted");
return result;
}
// Verify attachment reference compatibility according to spec
-// If one array is larger, treat missing elements of shorter array as VK_ATTACHMENT_UNUSED & other array much match this
-// If both AttachmentReference arrays have requested index, check their corresponding AttachementDescriptions
+// If one array is larger, treat missing elements of shorter array as
+// VK_ATTACHMENT_UNUSED & other array must match this
+// If both AttachmentReference arrays have requested index, check their
+// corresponding AttachmentDescriptions
// to make sure that format and sample counts match.
// If not, they are not compatible.
-static bool attachment_references_compatible(const uint32_t index, const VkAttachmentReference* pPrimary, const uint32_t primaryCount, const VkAttachmentDescription* pPrimaryAttachments,
- const VkAttachmentReference* pSecondary, const uint32_t secondaryCount, const VkAttachmentDescription* pSecondaryAttachments)
-{
- if (index >= primaryCount) { // Check secondary as if primary is VK_ATTACHMENT_UNUSED
+static bool attachment_references_compatible(
+ const uint32_t index, const VkAttachmentReference *pPrimary,
+ const uint32_t primaryCount,
+ const VkAttachmentDescription *pPrimaryAttachments,
+ const VkAttachmentReference *pSecondary, const uint32_t secondaryCount,
+ const VkAttachmentDescription *pSecondaryAttachments) {
+ if (index >=
+ primaryCount) { // Check secondary as if primary is VK_ATTACHMENT_UNUSED
if (VK_ATTACHMENT_UNUSED != pSecondary[index].attachment)
return false;
- } else if (index >= secondaryCount) { // Check primary as if secondary is VK_ATTACHMENT_UNUSED
+ } else if (index >= secondaryCount) { // Check primary as if secondary is
+ // VK_ATTACHMENT_UNUSED
if (VK_ATTACHMENT_UNUSED != pPrimary[index].attachment)
return false;
} else { // format and sample count must match
- if ((pPrimaryAttachments[pPrimary[index].attachment].format == pSecondaryAttachments[pSecondary[index].attachment].format) &&
- (pPrimaryAttachments[pPrimary[index].attachment].samples == pSecondaryAttachments[pSecondary[index].attachment].samples))
+ if ((pPrimaryAttachments[pPrimary[index].attachment].format ==
+ pSecondaryAttachments[pSecondary[index].attachment].format) &&
+ (pPrimaryAttachments[pPrimary[index].attachment].samples ==
+ pSecondaryAttachments[pSecondary[index].attachment].samples))
return true;
}
// Format and sample counts didn't match
return false;
}
-// For give primary and secondary RenderPass objects, verify that they're compatible
-static bool verify_renderpass_compatibility(layer_data* my_data, const VkRenderPass primaryRP, const VkRenderPass secondaryRP, string& errorMsg)
-{
+// For given primary and secondary RenderPass objects, verify that they're
+// compatible
+static bool verify_renderpass_compatibility(layer_data *my_data,
+ const VkRenderPass primaryRP,
+ const VkRenderPass secondaryRP,
+ string &errorMsg) {
stringstream errorStr;
- if (my_data->renderPassMap.find(primaryRP) == my_data->renderPassMap.end()) {
+ if (my_data->renderPassMap.find(primaryRP) ==
+ my_data->renderPassMap.end()) {
errorStr << "invalid VkRenderPass (" << primaryRP << ")";
errorMsg = errorStr.str();
return false;
- } else if (my_data->renderPassMap.find(secondaryRP) == my_data->renderPassMap.end()) {
+ } else if (my_data->renderPassMap.find(secondaryRP) ==
+ my_data->renderPassMap.end()) {
errorStr << "invalid VkRenderPass (" << secondaryRP << ")";
errorMsg = errorStr.str();
return false;
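A short worked example of the attachment-compatibility rule implemented above (all counts and formats are hypothetical):

// Primary subpass declares 2 color references, secondary declares 1.
// For index 1, index >= secondaryCount, so the secondary is treated as
// VK_ATTACHMENT_UNUSED there and pPrimary[1].attachment must itself be
// VK_ATTACHMENT_UNUSED for the render passes to stay compatible.
// When both arrays cover the index, the referenced VkAttachmentDescription
// format and samples must match, e.g.
//   primary:   VK_FORMAT_B8G8R8A8_UNORM, VK_SAMPLE_COUNT_1_BIT
//   secondary: VK_FORMAT_B8G8R8A8_UNORM, VK_SAMPLE_COUNT_4_BIT  -> incompatible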
@@ -1259,44 +1399,78 @@
// Trivial pass case is exact same RP
if (primaryRP == secondaryRP)
return true;
- const VkRenderPassCreateInfo* primaryRPCI = my_data->renderPassMap[primaryRP]->pCreateInfo;
- const VkRenderPassCreateInfo* secondaryRPCI = my_data->renderPassMap[secondaryRP]->pCreateInfo;
+ const VkRenderPassCreateInfo *primaryRPCI =
+ my_data->renderPassMap[primaryRP]->pCreateInfo;
+ const VkRenderPassCreateInfo *secondaryRPCI =
+ my_data->renderPassMap[secondaryRP]->pCreateInfo;
if (primaryRPCI->subpassCount != secondaryRPCI->subpassCount) {
- errorStr << "RenderPass for primary cmdBuffer has " << primaryRPCI->subpassCount << " subpasses but renderPass for secondary cmdBuffer has " << secondaryRPCI->subpassCount << " subpasses.";
+ errorStr << "RenderPass for primary cmdBuffer has "
+ << primaryRPCI->subpassCount
+ << " subpasses but renderPass for secondary cmdBuffer has "
+ << secondaryRPCI->subpassCount << " subpasses.";
errorMsg = errorStr.str();
return false;
}
uint32_t spIndex = 0;
for (spIndex = 0; spIndex < primaryRPCI->subpassCount; ++spIndex) {
- // For each subpass, verify that corresponding color, input, resolve & depth/stencil attachment references are compatible
- uint32_t primaryColorCount = primaryRPCI->pSubpasses[spIndex].colorAttachmentCount;
- uint32_t secondaryColorCount = secondaryRPCI->pSubpasses[spIndex].colorAttachmentCount;
+ // For each subpass, verify that corresponding color, input, resolve &
+ // depth/stencil attachment references are compatible
+ uint32_t primaryColorCount =
+ primaryRPCI->pSubpasses[spIndex].colorAttachmentCount;
+ uint32_t secondaryColorCount =
+ secondaryRPCI->pSubpasses[spIndex].colorAttachmentCount;
uint32_t colorMax = std::max(primaryColorCount, secondaryColorCount);
for (uint32_t cIdx = 0; cIdx < colorMax; ++cIdx) {
- if (!attachment_references_compatible(cIdx, primaryRPCI->pSubpasses[spIndex].pColorAttachments, primaryColorCount, primaryRPCI->pAttachments,
- secondaryRPCI->pSubpasses[spIndex].pColorAttachments, secondaryColorCount, secondaryRPCI->pAttachments)) {
- errorStr << "color attachments at index " << cIdx << " of subpass index " << spIndex << " are not compatible.";
+ if (!attachment_references_compatible(
+ cIdx, primaryRPCI->pSubpasses[spIndex].pColorAttachments,
+ primaryColorCount, primaryRPCI->pAttachments,
+ secondaryRPCI->pSubpasses[spIndex].pColorAttachments,
+ secondaryColorCount, secondaryRPCI->pAttachments)) {
+ errorStr << "color attachments at index " << cIdx
+ << " of subpass index " << spIndex
+ << " are not compatible.";
errorMsg = errorStr.str();
return false;
- } else if (!attachment_references_compatible(cIdx, primaryRPCI->pSubpasses[spIndex].pResolveAttachments, primaryColorCount, primaryRPCI->pAttachments,
- secondaryRPCI->pSubpasses[spIndex].pResolveAttachments, secondaryColorCount, secondaryRPCI->pAttachments)) {
- errorStr << "resolve attachments at index " << cIdx << " of subpass index " << spIndex << " are not compatible.";
+ } else if (!attachment_references_compatible(
+ cIdx,
+ primaryRPCI->pSubpasses[spIndex].pResolveAttachments,
+ primaryColorCount, primaryRPCI->pAttachments,
+ secondaryRPCI->pSubpasses[spIndex]
+ .pResolveAttachments,
+ secondaryColorCount, secondaryRPCI->pAttachments)) {
+ errorStr << "resolve attachments at index " << cIdx
+ << " of subpass index " << spIndex
+ << " are not compatible.";
errorMsg = errorStr.str();
return false;
- } else if (!attachment_references_compatible(cIdx, primaryRPCI->pSubpasses[spIndex].pDepthStencilAttachment, primaryColorCount, primaryRPCI->pAttachments,
- secondaryRPCI->pSubpasses[spIndex].pDepthStencilAttachment, secondaryColorCount, secondaryRPCI->pAttachments)) {
- errorStr << "depth/stencil attachments at index " << cIdx << " of subpass index " << spIndex << " are not compatible.";
+ } else if (!attachment_references_compatible(
+ cIdx, primaryRPCI->pSubpasses[spIndex]
+ .pDepthStencilAttachment,
+ primaryColorCount, primaryRPCI->pAttachments,
+ secondaryRPCI->pSubpasses[spIndex]
+ .pDepthStencilAttachment,
+ secondaryColorCount, secondaryRPCI->pAttachments)) {
+ errorStr << "depth/stencil attachments at index " << cIdx
+ << " of subpass index " << spIndex
+ << " are not compatible.";
errorMsg = errorStr.str();
return false;
}
}
- uint32_t primaryInputCount = primaryRPCI->pSubpasses[spIndex].inputAttachmentCount;
- uint32_t secondaryInputCount = secondaryRPCI->pSubpasses[spIndex].inputAttachmentCount;
+ uint32_t primaryInputCount =
+ primaryRPCI->pSubpasses[spIndex].inputAttachmentCount;
+ uint32_t secondaryInputCount =
+ secondaryRPCI->pSubpasses[spIndex].inputAttachmentCount;
uint32_t inputMax = std::max(primaryInputCount, secondaryInputCount);
for (uint32_t i = 0; i < inputMax; ++i) {
- if (!attachment_references_compatible(i, primaryRPCI->pSubpasses[spIndex].pInputAttachments, primaryColorCount, primaryRPCI->pAttachments,
- secondaryRPCI->pSubpasses[spIndex].pInputAttachments, secondaryColorCount, secondaryRPCI->pAttachments)) {
- errorStr << "input attachments at index " << i << " of subpass index " << spIndex << " are not compatible.";
+ if (!attachment_references_compatible(
+ i, primaryRPCI->pSubpasses[spIndex].pInputAttachments,
+ primaryColorCount, primaryRPCI->pAttachments,
+ secondaryRPCI->pSubpasses[spIndex].pInputAttachments,
+ secondaryColorCount, secondaryRPCI->pAttachments)) {
+ errorStr << "input attachments at index " << i
+ << " of subpass index " << spIndex
+ << " are not compatible.";
errorMsg = errorStr.str();
return false;
}
@@ -1305,44 +1479,72 @@
return true;
}
-// For give SET_NODE, verify that its Set is compatible w/ the setLayout corresponding to pipelineLayout[layoutIndex]
-static bool verify_set_layout_compatibility(layer_data* my_data, const SET_NODE* pSet, const VkPipelineLayout layout, const uint32_t layoutIndex, string& errorMsg)
-{
+// For given SET_NODE, verify that its Set is compatible w/ the setLayout
+// corresponding to pipelineLayout[layoutIndex]
+static bool verify_set_layout_compatibility(layer_data *my_data,
+ const SET_NODE *pSet,
+ const VkPipelineLayout layout,
+ const uint32_t layoutIndex,
+ string &errorMsg) {
stringstream errorStr;
- if (my_data->pipelineLayoutMap.find(layout) == my_data->pipelineLayoutMap.end()) {
+ if (my_data->pipelineLayoutMap.find(layout) ==
+ my_data->pipelineLayoutMap.end()) {
errorStr << "invalid VkPipelineLayout (" << layout << ")";
errorMsg = errorStr.str();
return false;
}
PIPELINE_LAYOUT_NODE pl = my_data->pipelineLayoutMap[layout];
if (layoutIndex >= pl.descriptorSetLayouts.size()) {
- errorStr << "VkPipelineLayout (" << layout << ") only contains " << pl.descriptorSetLayouts.size() << " setLayouts corresponding to sets 0-" << pl.descriptorSetLayouts.size()-1 << ", but you're attempting to bind set to index " << layoutIndex;
+ errorStr << "VkPipelineLayout (" << layout << ") only contains "
+ << pl.descriptorSetLayouts.size()
+ << " setLayouts corresponding to sets 0-"
+ << pl.descriptorSetLayouts.size() - 1
+ << ", but you're attempting to bind set to index "
+ << layoutIndex;
errorMsg = errorStr.str();
return false;
}
// Get the specific setLayout from PipelineLayout that overlaps this set
- LAYOUT_NODE* pLayoutNode = my_data->descriptorSetLayoutMap[pl.descriptorSetLayouts[layoutIndex]];
+ LAYOUT_NODE *pLayoutNode =
+ my_data->descriptorSetLayoutMap[pl.descriptorSetLayouts[layoutIndex]];
if (pLayoutNode->layout == pSet->pLayout->layout) { // trivial pass case
return true;
}
size_t descriptorCount = pLayoutNode->descriptorTypes.size();
if (descriptorCount != pSet->pLayout->descriptorTypes.size()) {
- errorStr << "setLayout " << layoutIndex << " from pipelineLayout " << layout << " has " << descriptorCount << " descriptors, but corresponding set being bound has " << pSet->pLayout->descriptorTypes.size() << " descriptors.";
+ errorStr << "setLayout " << layoutIndex << " from pipelineLayout "
+ << layout << " has " << descriptorCount
+ << " descriptors, but corresponding set being bound has "
+ << pSet->pLayout->descriptorTypes.size() << " descriptors.";
errorMsg = errorStr.str();
return false; // trivial fail case
}
- // Now need to check set against corresponding pipelineLayout to verify compatibility
- for (size_t i=0; i<descriptorCount; ++i) {
+ // Now need to check set against corresponding pipelineLayout to verify
+ // compatibility
+ for (size_t i = 0; i < descriptorCount; ++i) {
// Need to verify that layouts are identically defined
- // TODO : Is below sufficient? Making sure that types & stageFlags match per descriptor
+ // TODO : Is below sufficient? Making sure that types & stageFlags
+ // match per descriptor
// do we also need to check immutable samplers?
- if (pLayoutNode->descriptorTypes[i] != pSet->pLayout->descriptorTypes[i]) {
- errorStr << "descriptor " << i << " for descriptorSet being bound is type '" << string_VkDescriptorType(pSet->pLayout->descriptorTypes[i]) << "' but corresponding descriptor from pipelineLayout is type '" << string_VkDescriptorType(pLayoutNode->descriptorTypes[i]) << "'";
+ if (pLayoutNode->descriptorTypes[i] !=
+ pSet->pLayout->descriptorTypes[i]) {
+ errorStr << "descriptor " << i
+ << " for descriptorSet being bound is type '"
+ << string_VkDescriptorType(
+ pSet->pLayout->descriptorTypes[i])
+ << "' but corresponding descriptor from pipelineLayout is "
+ "type '"
+ << string_VkDescriptorType(pLayoutNode->descriptorTypes[i])
+ << "'";
errorMsg = errorStr.str();
return false;
}
if (pLayoutNode->stageFlags[i] != pSet->pLayout->stageFlags[i]) {
- errorStr << "stageFlags " << i << " for descriptorSet being bound is " << pSet->pLayout->stageFlags[i] << "' but corresponding descriptor from pipelineLayout has stageFlags " << pLayoutNode->stageFlags[i];
+ errorStr << "stageFlags " << i
+                     << " for descriptorSet being bound is '"
+ << pSet->pLayout->stageFlags[i]
+ << "' but corresponding descriptor from pipelineLayout "
+ "has stageFlags " << pLayoutNode->stageFlags[i];
errorMsg = errorStr.str();
return false;
}
@@ -1351,60 +1553,85 @@
}
// Validate the shaders used by the given pipeline
-// As a side effect this function also records the sets that are actually used by the pipeline
-static VkBool32
-validate_pipeline_shaders(layer_data *my_data, VkDevice dev, PIPELINE_NODE* pPipeline)
-{
- VkGraphicsPipelineCreateInfo const *pCreateInfo = &pPipeline->graphicsPipelineCI;
- /* We seem to allow pipeline stages to be specified out of order, so collect and identify them
+// As a side effect this function also records the sets that are actually used
+// by the pipeline
+static VkBool32 validate_pipeline_shaders(layer_data *my_data, VkDevice dev,
+ PIPELINE_NODE *pPipeline) {
+ VkGraphicsPipelineCreateInfo const *pCreateInfo =
+ &pPipeline->graphicsPipelineCI;
+ /* We seem to allow pipeline stages to be specified out of order, so collect
+ * and identify them
* before trying to do anything more: */
int vertex_stage = get_shader_stage_id(VK_SHADER_STAGE_VERTEX_BIT);
int geometry_stage = get_shader_stage_id(VK_SHADER_STAGE_GEOMETRY_BIT);
int fragment_stage = get_shader_stage_id(VK_SHADER_STAGE_FRAGMENT_BIT);
- shader_module **shaders = new shader_module*[fragment_stage + 1]; /* exclude CS */
- memset(shaders, 0, sizeof(shader_module *) * (fragment_stage +1));
+ shader_module **shaders =
+ new shader_module *[fragment_stage + 1]; /* exclude CS */
+ memset(shaders, 0, sizeof(shader_module *) * (fragment_stage + 1));
RENDER_PASS_NODE const *rp = 0;
VkPipelineVertexInputStateCreateInfo const *vi = 0;
VkBool32 pass = VK_TRUE;
for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
- VkPipelineShaderStageCreateInfo const *pStage = &pCreateInfo->pStages[i];
- if (pStage->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
+ VkPipelineShaderStageCreateInfo const *pStage =
+ &pCreateInfo->pStages[i];
+ if (pStage->sType ==
+ VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
- if ((pStage->stage & (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_FRAGMENT_BIT
- | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)) == 0) {
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__, SHADER_CHECKER_UNKNOWN_STAGE, "SC",
- "Unknown shader stage %d", pStage->stage)) {
+ if ((pStage->stage &
+ (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_GEOMETRY_BIT |
+ VK_SHADER_STAGE_FRAGMENT_BIT |
+ VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
+ VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)) == 0) {
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/ 0,
+ __LINE__, SHADER_CHECKER_UNKNOWN_STAGE, "SC",
+ "Unknown shader stage %d", pStage->stage)) {
pass = VK_FALSE;
}
- }
- else {
- shader_module *module = my_data->shaderModuleMap[pStage->module];
+ } else {
+ shader_module *module =
+ my_data->shaderModuleMap[pStage->module];
shaders[get_shader_stage_id(pStage->stage)] = module;
- /* validate descriptor set layout against what the spirv module actually uses */
- std::map<std::pair<unsigned, unsigned>, interface_var> descriptor_uses;
- collect_interface_by_descriptor_slot(my_data, dev, module, spv::StorageClassUniform,
- descriptor_uses);
+ /* validate descriptor set layout against what the spirv module
+ * actually uses */
+ std::map<std::pair<unsigned, unsigned>, interface_var>
+ descriptor_uses;
+ collect_interface_by_descriptor_slot(my_data, dev, module,
+ spv::StorageClassUniform,
+ descriptor_uses);
- auto layouts = pCreateInfo->layout != VK_NULL_HANDLE ?
- &(my_data->pipelineLayoutMap[pCreateInfo->layout].descriptorSetLayouts) : nullptr;
+ auto layouts =
+ pCreateInfo->layout != VK_NULL_HANDLE
+ ? &(my_data->pipelineLayoutMap[pCreateInfo->layout]
+ .descriptorSetLayouts)
+ : nullptr;
- for (auto it = descriptor_uses.begin(); it != descriptor_uses.end(); it++) {
- // As a side-effect of this function, capture which sets are used by the pipeline
+ for (auto it = descriptor_uses.begin();
+ it != descriptor_uses.end(); it++) {
+ // As a side-effect of this function, capture which sets are
+ // used by the pipeline
pPipeline->active_sets.insert(it->first.first);
/* find the matching binding */
- auto found = has_descriptor_binding(my_data, layouts, it->first);
+ auto found =
+ has_descriptor_binding(my_data, layouts, it->first);
if (!found) {
char type_name[1024];
describe_type(type_name, module, it->second.type_id);
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, /*dev*/0, __LINE__,
- SHADER_CHECKER_MISSING_DESCRIPTOR, "SC",
- "Shader uses descriptor slot %u.%u (used as type `%s`) but not declared in pipeline layout",
- it->first.first, it->first.second, type_name)) {
+ if (log_msg(my_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ /*dev*/ 0, __LINE__,
+ SHADER_CHECKER_MISSING_DESCRIPTOR, "SC",
+ "Shader uses descriptor slot %u.%u (used "
+ "as type `%s`) but not declared in "
+ "pipeline layout",
+ it->first.first, it->first.second,
+ type_name)) {
pass = VK_FALSE;
}
}
@@ -1423,7 +1650,9 @@
}
if (shaders[vertex_stage]) {
- pass = validate_vi_against_vs_inputs(my_data, dev, vi, shaders[vertex_stage]) && pass;
+ pass = validate_vi_against_vs_inputs(my_data, dev, vi,
+ shaders[vertex_stage]) &&
+ pass;
}
/* TODO: enforce rules about present combinations of shaders */
@@ -1435,30 +1664,35 @@
consumer++;
}
- for (; producer != fragment_stage && consumer <= fragment_stage; consumer++) {
+ for (; producer != fragment_stage && consumer <= fragment_stage;
+ consumer++) {
assert(shaders[producer]);
if (shaders[consumer]) {
- pass = validate_interface_between_stages(my_data, dev,
- shaders[producer], shader_stage_attribs[producer].name,
- shaders[consumer], shader_stage_attribs[consumer].name,
- shader_stage_attribs[consumer].arrayed_input) && pass;
+ pass = validate_interface_between_stages(
+ my_data, dev, shaders[producer],
+ shader_stage_attribs[producer].name, shaders[consumer],
+ shader_stage_attribs[consumer].name,
+ shader_stage_attribs[consumer].arrayed_input) &&
+ pass;
producer = consumer;
}
}
if (shaders[fragment_stage] && rp) {
- pass = validate_fs_outputs_against_render_pass(my_data, dev, shaders[fragment_stage], rp, pCreateInfo->subpass) && pass;
+ pass = validate_fs_outputs_against_render_pass(
+ my_data, dev, shaders[fragment_stage], rp,
+ pCreateInfo->subpass) &&
+ pass;
}
- delete [] shaders;
+ delete[] shaders;
return pass;
}
// Return Set node ptr for specified set or else NULL
-static SET_NODE* getSetNode(layer_data* my_data, const VkDescriptorSet set)
-{
+static SET_NODE *getSetNode(layer_data *my_data, const VkDescriptorSet set) {
loader_platform_thread_lock_mutex(&globalLock);
if (my_data->setMap.find(set) == my_data->setMap.end()) {
loader_platform_thread_unlock_mutex(&globalLock);
@@ -1471,35 +1705,61 @@
// that any dynamic descriptor in that set has a valid dynamic offset bound.
// To be valid, the dynamic offset combined with the offset and range from its
// descriptor update must not overflow the size of its buffer being updated
-static VkBool32 validate_dynamic_offsets(layer_data* my_data, const GLOBAL_CB_NODE* pCB, const vector<SET_NODE*> activeSetNodes)
-{
+static VkBool32
+validate_dynamic_offsets(layer_data *my_data, const GLOBAL_CB_NODE *pCB,
+ const vector<SET_NODE *> activeSetNodes) {
VkBool32 result = VK_FALSE;
- VkWriteDescriptorSet* pWDS = NULL;
+ VkWriteDescriptorSet *pWDS = NULL;
uint32_t dynOffsetIndex = 0;
VkDeviceSize bufferSize = 0;
for (auto set_node : activeSetNodes) {
- for (uint32_t i=0; i < set_node->descriptorCount; ++i) {
+ for (uint32_t i = 0; i < set_node->descriptorCount; ++i) {
switch (set_node->ppDescriptors[i]->sType) {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- pWDS = (VkWriteDescriptorSet*)set_node->ppDescriptors[i];
- if ((pWDS->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
- (pWDS->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
- for (uint32_t j=0; j<pWDS->descriptorCount; ++j) {
- bufferSize = my_data->bufferMap[pWDS->pBufferInfo[j].buffer].create_info->size;
- if ((pCB->dynamicOffsets[dynOffsetIndex] + pWDS->pBufferInfo[j].offset + pWDS->pBufferInfo[j].range) > bufferSize) {
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)set_node->set, __LINE__, DRAWSTATE_DYNAMIC_OFFSET_OVERFLOW, "DS",
- "VkDescriptorSet (%#" PRIxLEAST64 ") bound as set #%u has dynamic offset %u. Combined with offet %#" PRIxLEAST64 " and range %#" PRIxLEAST64 " from its update, this oversteps its buffer (%#" PRIxLEAST64 ") which has a size of %#" PRIxLEAST64 ".",
- (uint64_t)set_node->set, i, pCB->dynamicOffsets[dynOffsetIndex], pWDS->pBufferInfo[j].offset, pWDS->pBufferInfo[j].range, (uint64_t)pWDS->pBufferInfo[j].buffer, bufferSize);
- }
- dynOffsetIndex++;
- i += j; // Advance i to end of this set of descriptors (++i at end of for loop will move 1 index past last of these descriptors)
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ pWDS = (VkWriteDescriptorSet *)set_node->ppDescriptors[i];
+ if ((pWDS->descriptorType ==
+ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
+ (pWDS->descriptorType ==
+ VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
+ for (uint32_t j = 0; j < pWDS->descriptorCount; ++j) {
+ bufferSize =
+ my_data->bufferMap[pWDS->pBufferInfo[j].buffer]
+ .create_info->size;
+ if ((pCB->dynamicOffsets[dynOffsetIndex] +
+ pWDS->pBufferInfo[j].offset +
+ pWDS->pBufferInfo[j].range) > bufferSize) {
+ result |= log_msg(
+ my_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)set_node->set, __LINE__,
+ DRAWSTATE_DYNAMIC_OFFSET_OVERFLOW, "DS",
+ "VkDescriptorSet (%#" PRIxLEAST64
+ ") bound as set #%u has dynamic offset %u. "
+                        "Combined with offset %#" PRIxLEAST64
+ " and range %#" PRIxLEAST64
+ " from its update, this oversteps its buffer "
+ "(%#" PRIxLEAST64
+ ") which has a size of %#" PRIxLEAST64 ".",
+ (uint64_t)set_node->set, i,
+ pCB->dynamicOffsets[dynOffsetIndex],
+ pWDS->pBufferInfo[j].offset,
+ pWDS->pBufferInfo[j].range,
+ (uint64_t)pWDS->pBufferInfo[j].buffer,
+ bufferSize);
}
+ dynOffsetIndex++;
+ i += j; // Advance i to end of this set of descriptors
+ // (++i at end of for loop will move 1 index
+ // past last of these descriptors)
}
- break;
- default: // Currently only shadowing Write update nodes so shouldn't get here
- assert(0);
- continue;
+ }
+ break;
+ default: // Currently only shadowing Write update nodes so shouldn't
+ // get here
+ assert(0);
+ continue;
}
}
}
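The overflow rule enforced above reduces to simple arithmetic; a small numeric example with hypothetical values:

// Dynamic uniform buffer descriptor:
//   buffer size   = 256
//   update offset =  64
//   update range  = 128
// A draw is valid only while dynamicOffset + offset + range <= size, i.e.
// dynamicOffset <= 64 here; binding the set with a dynamic offset of 128
// triggers DRAWSTATE_DYNAMIC_OFFSET_OVERFLOW (128 + 64 + 128 > 256).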
@@ -1507,78 +1767,148 @@
}
// Validate overall state at the time of a draw call
-static VkBool32 validate_draw_state(layer_data* my_data, GLOBAL_CB_NODE* pCB, VkBool32 indexedDraw) {
+static VkBool32 validate_draw_state(layer_data *my_data, GLOBAL_CB_NODE *pCB,
+ VkBool32 indexedDraw) {
// First check flag states
VkBool32 result = validate_draw_state_flags(my_data, pCB, indexedDraw);
- PIPELINE_NODE* pPipe = getPipeline(my_data, pCB->lastBoundPipeline);
+ PIPELINE_NODE *pPipe = getPipeline(my_data, pCB->lastBoundPipeline);
// Now complete other state checks
- // TODO : Currently only performing next check if *something* was bound (non-zero last bound)
- // There is probably a better way to gate when this check happens, and to know if something *should* have been bound
- // We should have that check separately and then gate this check based on that check
+ // TODO : Currently only performing next check if *something* was bound
+ // (non-zero last bound)
+ // There is probably a better way to gate when this check happens, and to
+ // know if something *should* have been bound
+ // We should have that check separately and then gate this check based on
+ // that check
if (pPipe) {
loader_platform_thread_lock_mutex(&globalLock);
if (pCB->lastBoundPipelineLayout) {
string errorString;
- // Need a vector (vs. std::set) of active Sets for dynamicOffset validation in case same set bound w/ different offsets
- vector<SET_NODE*> activeSetNodes;
+ // Need a vector (vs. std::set) of active Sets for dynamicOffset
+ // validation in case same set bound w/ different offsets
+ vector<SET_NODE *> activeSetNodes;
for (auto setIndex : pPipe->active_sets) {
// If valid set is not bound throw an error
- if ((pCB->boundDescriptorSets.size() <= setIndex) || (!pCB->boundDescriptorSets[setIndex])) {
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_DESCRIPTOR_SET_NOT_BOUND, "DS",
- "VkPipeline %#" PRIxLEAST64 " uses set #%u but that set is not bound.", (uint64_t)pPipe->pipeline, setIndex);
- } else if (!verify_set_layout_compatibility(my_data, my_data->setMap[pCB->boundDescriptorSets[setIndex]], pPipe->graphicsPipelineCI.layout, setIndex, errorString)) {
- // Set is bound but not compatible w/ overlapping pipelineLayout from PSO
- VkDescriptorSet setHandle = my_data->setMap[pCB->boundDescriptorSets[setIndex]]->set;
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)setHandle, __LINE__, DRAWSTATE_PIPELINE_LAYOUTS_INCOMPATIBLE, "DS",
- "VkDescriptorSet (%#" PRIxLEAST64 ") bound as set #%u is not compatible with overlapping VkPipelineLayout %#" PRIxLEAST64 " due to: %s",
- (uint64_t)setHandle, setIndex, (uint64_t)pPipe->graphicsPipelineCI.layout, errorString.c_str());
- } else { // Valid set is bound and layout compatible, validate that it's updated and verify any dynamic offsets
+ if ((pCB->boundDescriptorSets.size() <= setIndex) ||
+ (!pCB->boundDescriptorSets[setIndex])) {
+ result |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_DESCRIPTOR_SET_NOT_BOUND, "DS",
+ "VkPipeline %#" PRIxLEAST64
+ " uses set #%u but that set is not bound.",
+ (uint64_t)pPipe->pipeline, setIndex);
+ } else if (!verify_set_layout_compatibility(
+ my_data,
+ my_data
+ ->setMap[pCB->boundDescriptorSets[setIndex]],
+ pPipe->graphicsPipelineCI.layout, setIndex,
+ errorString)) {
+ // Set is bound but not compatible w/ overlapping
+ // pipelineLayout from PSO
+ VkDescriptorSet setHandle =
+ my_data->setMap[pCB->boundDescriptorSets[setIndex]]
+ ->set;
+ result |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)setHandle, __LINE__,
+ DRAWSTATE_PIPELINE_LAYOUTS_INCOMPATIBLE, "DS",
+ "VkDescriptorSet (%#" PRIxLEAST64
+ ") bound as set #%u is not compatible with overlapping "
+ "VkPipelineLayout %#" PRIxLEAST64 " due to: %s",
+ (uint64_t)setHandle, setIndex,
+ (uint64_t)pPipe->graphicsPipelineCI.layout,
+ errorString.c_str());
+ } else { // Valid set is bound and layout compatible, validate
+ // that it's updated and verify any dynamic offsets
// Pull the set node
- SET_NODE* pSet = my_data->setMap[pCB->boundDescriptorSets[setIndex]];
- // Save vector of all active sets to verify dynamicOffsets below
+ SET_NODE *pSet =
+ my_data->setMap[pCB->boundDescriptorSets[setIndex]];
+ // Save vector of all active sets to verify dynamicOffsets
+ // below
activeSetNodes.push_back(pSet);
// Make sure set has been updated
if (!pSet->pUpdateStructs) {
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pSet->set, __LINE__, DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, "DS",
- "DS %#" PRIxLEAST64 " bound but it was never updated. It is now being used to draw so this will result in undefined behavior.", (uint64_t) pSet->set);
+ result |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pSet->set, __LINE__,
+ DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, "DS",
+ "DS %#" PRIxLEAST64 " bound but it was never "
+ "updated. It is now being used "
+ "to draw so this will result "
+ "in undefined behavior.",
+ (uint64_t)pSet->set);
}
}
}
- // For each dynamic descriptor, make sure dynamic offset doesn't overstep buffer
+ // For each dynamic descriptor, make sure dynamic offset doesn't
+ // overstep buffer
if (!pCB->dynamicOffsets.empty())
- result |= validate_dynamic_offsets(my_data, pCB, activeSetNodes);
+ result |=
+ validate_dynamic_offsets(my_data, pCB, activeSetNodes);
}
// Verify Vtx binding
if (pPipe->vtxBindingCount > 0) {
- VkPipelineVertexInputStateCreateInfo *vtxInCI = &pPipe->vertexInputCI;
- for (uint32_t i = 0; i < vtxInCI->vertexBindingDescriptionCount; i++) {
- if ((pCB->currentDrawData.buffers.size() < (i+1)) || (pCB->currentDrawData.buffers[i] == VK_NULL_HANDLE)) {
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS, "DS",
- "The Pipeline State Object (%#" PRIxLEAST64 ") expects that this Command Buffer's vertex binding Index %d should be set via vkCmdBindVertexBuffers.",
+ VkPipelineVertexInputStateCreateInfo *vtxInCI =
+ &pPipe->vertexInputCI;
+ for (uint32_t i = 0; i < vtxInCI->vertexBindingDescriptionCount;
+ i++) {
+ if ((pCB->currentDrawData.buffers.size() < (i + 1)) ||
+ (pCB->currentDrawData.buffers[i] == VK_NULL_HANDLE)) {
+ result |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS, "DS",
+ "The Pipeline State Object (%#" PRIxLEAST64
+ ") expects that this Command Buffer's vertex binding "
+ "Index %d should be set via vkCmdBindVertexBuffers.",
(uint64_t)pCB->lastBoundPipeline, i);
-
}
}
} else {
if (!pCB->currentDrawData.buffers.empty()) {
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS,
- "DS", "Vertex buffers are bound to command buffer (%#" PRIxLEAST64 ") but no vertex buffers are attached to this Pipeline State Object (%#" PRIxLEAST64 ").",
- (uint64_t)pCB->commandBuffer, (uint64_t)pCB->lastBoundPipeline);
+ result |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS, "DS",
+ "Vertex buffers are bound to command buffer (%#" PRIxLEAST64
+ ") but no vertex buffers are attached to this Pipeline "
+ "State Object (%#" PRIxLEAST64 ").",
+ (uint64_t)pCB->commandBuffer,
+ (uint64_t)pCB->lastBoundPipeline);
}
}
- // If Viewport or scissors are dynamic, verify that dynamic count matches PSO count
+ // If Viewport or scissors are dynamic, verify that dynamic count
+ // matches PSO count
VkBool32 dynViewport = isDynamic(pPipe, VK_DYNAMIC_STATE_VIEWPORT);
VkBool32 dynScissor = isDynamic(pPipe, VK_DYNAMIC_STATE_SCISSOR);
if (dynViewport) {
- if (pCB->viewports.size() != pPipe->graphicsPipelineCI.pViewportState->viewportCount) {
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
- "Dynamic viewportCount from vkCmdSetViewport() is " PRINTF_SIZE_T_SPECIFIER ", but PSO viewportCount is %u. These counts must match.", pCB->viewports.size(), pPipe->graphicsPipelineCI.pViewportState->viewportCount);
+ if (pCB->viewports.size() !=
+ pPipe->graphicsPipelineCI.pViewportState->viewportCount) {
+ result |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
+ "Dynamic viewportCount from vkCmdSetViewport() "
+ "is " PRINTF_SIZE_T_SPECIFIER
+ ", but PSO viewportCount is %u. These counts must match.",
+ pCB->viewports.size(),
+ pPipe->graphicsPipelineCI.pViewportState->viewportCount);
}
}
if (dynScissor) {
- if (pCB->scissors.size() != pPipe->graphicsPipelineCI.pViewportState->scissorCount) {
- result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
- "Dynamic scissorCount from vkCmdSetScissor() is " PRINTF_SIZE_T_SPECIFIER ", but PSO scissorCount is %u. These counts must match.", pCB->scissors.size(), pPipe->graphicsPipelineCI.pViewportState->scissorCount);
+ if (pCB->scissors.size() !=
+ pPipe->graphicsPipelineCI.pViewportState->scissorCount) {
+ result |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
+ "Dynamic scissorCount from vkCmdSetScissor() "
+ "is " PRINTF_SIZE_T_SPECIFIER
+ ", but PSO scissorCount is %u. These counts must match.",
+ pCB->scissors.size(),
+ pPipe->graphicsPipelineCI.pViewportState->scissorCount);
}
}
loader_platform_thread_unlock_mutex(&globalLock);
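As a usage note, the dynamic viewport/scissor count check above catches mismatches like this minimal sketch (the commandBuffer handle and the PSO counts are assumed; the API call is standard Vulkan 1.0):

// PSO created with pViewportState->viewportCount == 2 and
// VK_DYNAMIC_STATE_VIEWPORT listed in pDynamicStates, but only one
// viewport is ever set on the command buffer:
VkViewport vp = {0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f};
vkCmdSetViewport(commandBuffer, 0 /*firstViewport*/, 1 /*viewportCount*/, &vp);
// At draw time pCB->viewports.size() == 1 != 2, so
// DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH is reported.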
@@ -1587,8 +1917,9 @@
}
// Verify that create state for a pipeline is valid
-static VkBool32 verifyPipelineCreateState(layer_data* my_data, const VkDevice device, PIPELINE_NODE* pPipeline)
-{
+static VkBool32 verifyPipelineCreateState(layer_data *my_data,
+ const VkDevice device,
+ PIPELINE_NODE *pPipeline) {
VkBool32 skipCall = VK_FALSE;
if (!validate_pipeline_shaders(my_data, device, pPipeline)) {
@@ -1596,63 +1927,128 @@
}
// VS is required
if (!(pPipeline->active_shaders & VK_SHADER_STAGE_VERTEX_BIT)) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
- "Invalid Pipeline CreateInfo State: Vtx Shader required");
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
+ "Invalid Pipeline CreateInfo State: Vtx Shader required");
}
// Either both or neither TC/TE shaders should be defined
- if (((pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) == 0) !=
- ((pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) == 0) ) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
- "Invalid Pipeline CreateInfo State: TE and TC shaders must be included or excluded as a pair");
+ if (((pPipeline->active_shaders &
+ VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) == 0) !=
+ ((pPipeline->active_shaders &
+ VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) == 0)) {
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
+ "Invalid Pipeline CreateInfo State: TE and TC "
+ "shaders must be included or excluded as a pair");
}
// Compute shaders should be specified independent of Gfx shaders
if ((pPipeline->active_shaders & VK_SHADER_STAGE_COMPUTE_BIT) &&
- (pPipeline->active_shaders & (VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
- VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT | VK_SHADER_STAGE_GEOMETRY_BIT |
- VK_SHADER_STAGE_FRAGMENT_BIT))) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
- "Invalid Pipeline CreateInfo State: Do not specify Compute Shader for Gfx Pipeline");
+ (pPipeline->active_shaders &
+ (VK_SHADER_STAGE_VERTEX_BIT |
+ VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
+ VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT |
+ VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_FRAGMENT_BIT))) {
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
+ "Invalid Pipeline CreateInfo State: Do not specify "
+ "Compute Shader for Gfx Pipeline");
}
- // VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid for tessellation pipelines.
- // Mismatching primitive topology and tessellation fails graphics pipeline creation.
- if (pPipeline->active_shaders & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) &&
+ // VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid for
+ // tessellation pipelines.
+ // Mismatching primitive topology and tessellation fails graphics pipeline
+ // creation.
+ if (pPipeline->active_shaders &
+ (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
+ VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) &&
(pPipeline->iaStateCI.topology != VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
- "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as IA topology for tessellation pipelines");
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
+ "Invalid Pipeline CreateInfo State: "
+ "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as "
+ "IA topology for tessellation pipelines");
}
if (pPipeline->iaStateCI.topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
- if (~pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
- "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology is only valid for tessellation pipelines");
+ if (~pPipeline->active_shaders &
+ VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
+ "Invalid Pipeline CreateInfo State: "
+ "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology "
+ "is only valid for tessellation pipelines");
}
- if (!pPipeline->tessStateCI.patchControlPoints || (pPipeline->tessStateCI.patchControlPoints > 32)) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
- "Invalid Pipeline CreateInfo State: VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology used with patchControlPoints value %u."
- " patchControlPoints should be >0 and <=32.", pPipeline->tessStateCI.patchControlPoints);
+ if (!pPipeline->tessStateCI.patchControlPoints ||
+ (pPipeline->tessStateCI.patchControlPoints > 32)) {
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, "DS",
+ "Invalid Pipeline CreateInfo State: "
+ "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive topology "
+ "used with patchControlPoints value %u."
+ " patchControlPoints should be >0 and <=32.",
+ pPipeline->tessStateCI.patchControlPoints);
}
}
- // Viewport state must be included and viewport and scissor counts should always match
- // NOTE : Even if these are flagged as dynamic, counts need to be set correctly for shader compiler
+ // Viewport state must be included and viewport and scissor counts should
+ // always match
+ // NOTE : Even if these are flagged as dynamic, counts need to be set
+ // correctly for shader compiler
if (!pPipeline->graphicsPipelineCI.pViewportState) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
- "Gfx Pipeline pViewportState is null. Even if viewport and scissors are dynamic PSO must include viewportCount and scissorCount in pViewportState.");
- } else if (pPipeline->graphicsPipelineCI.pViewportState->scissorCount != pPipeline->graphicsPipelineCI.pViewportState->viewportCount) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
- "Gfx Pipeline viewport count (%u) must match scissor count (%u).", pPipeline->vpStateCI.viewportCount, pPipeline->vpStateCI.scissorCount);
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
+ "Gfx Pipeline pViewportState is null. Even if viewport and "
+ "scissors are dynamic PSO must include viewportCount and "
+ "scissorCount in pViewportState.");
+ } else if (pPipeline->graphicsPipelineCI.pViewportState->scissorCount !=
+ pPipeline->graphicsPipelineCI.pViewportState->viewportCount) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
+ "Gfx Pipeline viewport count (%u) must match scissor count (%u).",
+ pPipeline->vpStateCI.viewportCount,
+ pPipeline->vpStateCI.scissorCount);
} else {
- // If viewport or scissor are not dynamic, then verify that data is appropriate for count
+ // If viewport or scissor are not dynamic, then verify that data is
+ // appropriate for count
VkBool32 dynViewport = isDynamic(pPipeline, VK_DYNAMIC_STATE_VIEWPORT);
VkBool32 dynScissor = isDynamic(pPipeline, VK_DYNAMIC_STATE_SCISSOR);
if (!dynViewport) {
- if (pPipeline->graphicsPipelineCI.pViewportState->viewportCount && !pPipeline->graphicsPipelineCI.pViewportState->pViewports) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
- "Gfx Pipeline viewportCount is %u, but pViewports is NULL. For non-zero viewportCount, you must either include pViewports data, or include viewport in pDynamicState and set it with vkCmdSetViewport().", pPipeline->graphicsPipelineCI.pViewportState->viewportCount);
+ if (pPipeline->graphicsPipelineCI.pViewportState->viewportCount &&
+ !pPipeline->graphicsPipelineCI.pViewportState->pViewports) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
+ "Gfx Pipeline viewportCount is %u, but pViewports is NULL. "
+ "For non-zero viewportCount, you must either include "
+ "pViewports data, or include viewport in pDynamicState and "
+ "set it with vkCmdSetViewport().",
+ pPipeline->graphicsPipelineCI.pViewportState
+ ->viewportCount);
}
}
if (!dynScissor) {
- if (pPipeline->graphicsPipelineCI.pViewportState->scissorCount && !pPipeline->graphicsPipelineCI.pViewportState->pScissors) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
- "Gfx Pipeline scissorCount is %u, but pScissors is NULL. For non-zero scissorCount, you must either include pScissors data, or include scissor in pDynamicState and set it with vkCmdSetScissor().", pPipeline->graphicsPipelineCI.pViewportState->scissorCount);
+ if (pPipeline->graphicsPipelineCI.pViewportState->scissorCount &&
+ !pPipeline->graphicsPipelineCI.pViewportState->pScissors) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, "DS",
+ "Gfx Pipeline scissorCount is %u, but pScissors is NULL. "
+ "For non-zero scissorCount, you must either include "
+ "pScissors data, or include scissor in pDynamicState and "
+ "set it with vkCmdSetScissor().",
+ pPipeline->graphicsPipelineCI.pViewportState->scissorCount);
}
}
}
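A minimal standalone sketch (editorial illustration, not part of this diff) of the two tessellation rules enforced in the hunk above: the TC and TE stages must be included or excluded as a pair, and patchControlPoints must lie in (0, 32] as hard-coded by the layer. It assumes <vulkan/vulkan.h> for the stage-flag enums; the helper names are placeholders, not names from this codebase.

#include <vulkan/vulkan.h>

// True when the tessellation control and evaluation stages are present or
// absent together, mirroring the pairing check above.
static bool tessStagesPaired(VkShaderStageFlags stages) {
    const bool hasTC = (stages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0;
    const bool hasTE = (stages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0;
    return hasTC == hasTE;
}

// Bound the layer applies to VK_PRIMITIVE_TOPOLOGY_PATCH_LIST pipelines.
static bool patchControlPointsInRange(uint32_t patchControlPoints) {
    return patchControlPoints > 0 && patchControlPoints <= 32;
}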
@@ -1662,120 +2058,164 @@
// Init the pipeline mapping info based on pipeline create info LL tree
// Threading note : Calls to this function should wrapped in mutex
// TODO : this should really just be in the constructor for PIPELINE_NODE
-static PIPELINE_NODE* initGraphicsPipeline(layer_data* dev_data, const VkGraphicsPipelineCreateInfo* pCreateInfo, PIPELINE_NODE* pBasePipeline)
-{
- PIPELINE_NODE* pPipeline = new PIPELINE_NODE;
+static PIPELINE_NODE *
+initGraphicsPipeline(layer_data *dev_data,
+ const VkGraphicsPipelineCreateInfo *pCreateInfo,
+ PIPELINE_NODE *pBasePipeline) {
+ PIPELINE_NODE *pPipeline = new PIPELINE_NODE;
if (pBasePipeline) {
*pPipeline = *pBasePipeline;
}
// First init create info
- memcpy(&pPipeline->graphicsPipelineCI, pCreateInfo, sizeof(VkGraphicsPipelineCreateInfo));
+ memcpy(&pPipeline->graphicsPipelineCI, pCreateInfo,
+ sizeof(VkGraphicsPipelineCreateInfo));
size_t bufferSize = 0;
- const VkPipelineVertexInputStateCreateInfo* pVICI = NULL;
- const VkPipelineColorBlendStateCreateInfo* pCBCI = NULL;
+ const VkPipelineVertexInputStateCreateInfo *pVICI = NULL;
+ const VkPipelineColorBlendStateCreateInfo *pCBCI = NULL;
for (uint32_t i = 0; i < pCreateInfo->stageCount; i++) {
- const VkPipelineShaderStageCreateInfo *pPSSCI = &pCreateInfo->pStages[i];
+ const VkPipelineShaderStageCreateInfo *pPSSCI =
+ &pCreateInfo->pStages[i];
switch (pPSSCI->stage) {
- case VK_SHADER_STAGE_VERTEX_BIT:
- memcpy(&pPipeline->vsCI, pPSSCI, sizeof(VkPipelineShaderStageCreateInfo));
- pPipeline->active_shaders |= VK_SHADER_STAGE_VERTEX_BIT;
- break;
- case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
- memcpy(&pPipeline->tcsCI, pPSSCI, sizeof(VkPipelineShaderStageCreateInfo));
- pPipeline->active_shaders |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
- break;
- case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
- memcpy(&pPipeline->tesCI, pPSSCI, sizeof(VkPipelineShaderStageCreateInfo));
- pPipeline->active_shaders |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
- break;
- case VK_SHADER_STAGE_GEOMETRY_BIT:
- memcpy(&pPipeline->gsCI, pPSSCI, sizeof(VkPipelineShaderStageCreateInfo));
- pPipeline->active_shaders |= VK_SHADER_STAGE_GEOMETRY_BIT;
- break;
- case VK_SHADER_STAGE_FRAGMENT_BIT:
- memcpy(&pPipeline->fsCI, pPSSCI, sizeof(VkPipelineShaderStageCreateInfo));
- pPipeline->active_shaders |= VK_SHADER_STAGE_FRAGMENT_BIT;
- break;
- case VK_SHADER_STAGE_COMPUTE_BIT:
- // TODO : Flag error, CS is specified through VkComputePipelineCreateInfo
- pPipeline->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
- break;
- default:
- // TODO : Flag error
- break;
+ case VK_SHADER_STAGE_VERTEX_BIT:
+ memcpy(&pPipeline->vsCI, pPSSCI,
+ sizeof(VkPipelineShaderStageCreateInfo));
+ pPipeline->active_shaders |= VK_SHADER_STAGE_VERTEX_BIT;
+ break;
+ case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
+ memcpy(&pPipeline->tcsCI, pPSSCI,
+ sizeof(VkPipelineShaderStageCreateInfo));
+ pPipeline->active_shaders |=
+ VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
+ break;
+ case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
+ memcpy(&pPipeline->tesCI, pPSSCI,
+ sizeof(VkPipelineShaderStageCreateInfo));
+ pPipeline->active_shaders |=
+ VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
+ break;
+ case VK_SHADER_STAGE_GEOMETRY_BIT:
+ memcpy(&pPipeline->gsCI, pPSSCI,
+ sizeof(VkPipelineShaderStageCreateInfo));
+ pPipeline->active_shaders |= VK_SHADER_STAGE_GEOMETRY_BIT;
+ break;
+ case VK_SHADER_STAGE_FRAGMENT_BIT:
+ memcpy(&pPipeline->fsCI, pPSSCI,
+ sizeof(VkPipelineShaderStageCreateInfo));
+ pPipeline->active_shaders |= VK_SHADER_STAGE_FRAGMENT_BIT;
+ break;
+ case VK_SHADER_STAGE_COMPUTE_BIT:
+ // TODO : Flag error, CS is specified through
+ // VkComputePipelineCreateInfo
+ pPipeline->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
+ break;
+ default:
+ // TODO : Flag error
+ break;
}
}
// Copy over GraphicsPipelineCreateInfo structure embedded pointers
if (pCreateInfo->stageCount != 0) {
- pPipeline->graphicsPipelineCI.pStages = new VkPipelineShaderStageCreateInfo[pCreateInfo->stageCount];
- bufferSize = pCreateInfo->stageCount * sizeof(VkPipelineShaderStageCreateInfo);
- memcpy((void*)pPipeline->graphicsPipelineCI.pStages, pCreateInfo->pStages, bufferSize);
+ pPipeline->graphicsPipelineCI.pStages =
+ new VkPipelineShaderStageCreateInfo[pCreateInfo->stageCount];
+ bufferSize =
+ pCreateInfo->stageCount * sizeof(VkPipelineShaderStageCreateInfo);
+ memcpy((void *)pPipeline->graphicsPipelineCI.pStages,
+ pCreateInfo->pStages, bufferSize);
}
if (pCreateInfo->pVertexInputState != NULL) {
- memcpy((void*)&pPipeline->vertexInputCI, pCreateInfo->pVertexInputState , sizeof(VkPipelineVertexInputStateCreateInfo));
+ memcpy((void *)&pPipeline->vertexInputCI,
+ pCreateInfo->pVertexInputState,
+ sizeof(VkPipelineVertexInputStateCreateInfo));
// Copy embedded ptrs
pVICI = pCreateInfo->pVertexInputState;
pPipeline->vtxBindingCount = pVICI->vertexBindingDescriptionCount;
if (pPipeline->vtxBindingCount) {
- pPipeline->pVertexBindingDescriptions = new VkVertexInputBindingDescription[pPipeline->vtxBindingCount];
- bufferSize = pPipeline->vtxBindingCount * sizeof(VkVertexInputBindingDescription);
- memcpy((void*)pPipeline->pVertexBindingDescriptions, pVICI->pVertexBindingDescriptions, bufferSize);
+ pPipeline->pVertexBindingDescriptions =
+ new VkVertexInputBindingDescription[pPipeline->vtxBindingCount];
+ bufferSize = pPipeline->vtxBindingCount *
+ sizeof(VkVertexInputBindingDescription);
+ memcpy((void *)pPipeline->pVertexBindingDescriptions,
+ pVICI->pVertexBindingDescriptions, bufferSize);
}
pPipeline->vtxAttributeCount = pVICI->vertexAttributeDescriptionCount;
if (pPipeline->vtxAttributeCount) {
- pPipeline->pVertexAttributeDescriptions = new VkVertexInputAttributeDescription[pPipeline->vtxAttributeCount];
- bufferSize = pPipeline->vtxAttributeCount * sizeof(VkVertexInputAttributeDescription);
- memcpy((void*)pPipeline->pVertexAttributeDescriptions, pVICI->pVertexAttributeDescriptions, bufferSize);
+ pPipeline->pVertexAttributeDescriptions =
+ new VkVertexInputAttributeDescription[pPipeline
+ ->vtxAttributeCount];
+ bufferSize = pPipeline->vtxAttributeCount *
+ sizeof(VkVertexInputAttributeDescription);
+ memcpy((void *)pPipeline->pVertexAttributeDescriptions,
+ pVICI->pVertexAttributeDescriptions, bufferSize);
}
- pPipeline->graphicsPipelineCI.pVertexInputState = &pPipeline->vertexInputCI;
+ pPipeline->graphicsPipelineCI.pVertexInputState =
+ &pPipeline->vertexInputCI;
}
if (pCreateInfo->pInputAssemblyState != NULL) {
- memcpy((void*)&pPipeline->iaStateCI, pCreateInfo->pInputAssemblyState, sizeof(VkPipelineInputAssemblyStateCreateInfo));
- pPipeline->graphicsPipelineCI.pInputAssemblyState = &pPipeline->iaStateCI;
+ memcpy((void *)&pPipeline->iaStateCI, pCreateInfo->pInputAssemblyState,
+ sizeof(VkPipelineInputAssemblyStateCreateInfo));
+ pPipeline->graphicsPipelineCI.pInputAssemblyState =
+ &pPipeline->iaStateCI;
}
if (pCreateInfo->pTessellationState != NULL) {
- memcpy((void*)&pPipeline->tessStateCI, pCreateInfo->pTessellationState, sizeof(VkPipelineTessellationStateCreateInfo));
- pPipeline->graphicsPipelineCI.pTessellationState = &pPipeline->tessStateCI;
+ memcpy((void *)&pPipeline->tessStateCI, pCreateInfo->pTessellationState,
+ sizeof(VkPipelineTessellationStateCreateInfo));
+ pPipeline->graphicsPipelineCI.pTessellationState =
+ &pPipeline->tessStateCI;
}
if (pCreateInfo->pViewportState != NULL) {
- memcpy((void*)&pPipeline->vpStateCI, pCreateInfo->pViewportState, sizeof(VkPipelineViewportStateCreateInfo));
+ memcpy((void *)&pPipeline->vpStateCI, pCreateInfo->pViewportState,
+ sizeof(VkPipelineViewportStateCreateInfo));
pPipeline->graphicsPipelineCI.pViewportState = &pPipeline->vpStateCI;
}
if (pCreateInfo->pRasterizationState != NULL) {
- memcpy((void*)&pPipeline->rsStateCI, pCreateInfo->pRasterizationState, sizeof(VkPipelineRasterizationStateCreateInfo));
- pPipeline->graphicsPipelineCI.pRasterizationState = &pPipeline->rsStateCI;
+ memcpy((void *)&pPipeline->rsStateCI, pCreateInfo->pRasterizationState,
+ sizeof(VkPipelineRasterizationStateCreateInfo));
+ pPipeline->graphicsPipelineCI.pRasterizationState =
+ &pPipeline->rsStateCI;
}
if (pCreateInfo->pMultisampleState != NULL) {
- memcpy((void*)&pPipeline->msStateCI, pCreateInfo->pMultisampleState, sizeof(VkPipelineMultisampleStateCreateInfo));
+ memcpy((void *)&pPipeline->msStateCI, pCreateInfo->pMultisampleState,
+ sizeof(VkPipelineMultisampleStateCreateInfo));
pPipeline->graphicsPipelineCI.pMultisampleState = &pPipeline->msStateCI;
}
if (pCreateInfo->pDepthStencilState != NULL) {
- memcpy((void*)&pPipeline->dsStateCI, pCreateInfo->pDepthStencilState, sizeof(VkPipelineDepthStencilStateCreateInfo));
- pPipeline->graphicsPipelineCI.pDepthStencilState = &pPipeline->dsStateCI;
+ memcpy((void *)&pPipeline->dsStateCI, pCreateInfo->pDepthStencilState,
+ sizeof(VkPipelineDepthStencilStateCreateInfo));
+ pPipeline->graphicsPipelineCI.pDepthStencilState =
+ &pPipeline->dsStateCI;
}
if (pCreateInfo->pColorBlendState != NULL) {
- memcpy((void*)&pPipeline->cbStateCI, pCreateInfo->pColorBlendState, sizeof(VkPipelineColorBlendStateCreateInfo));
+ memcpy((void *)&pPipeline->cbStateCI, pCreateInfo->pColorBlendState,
+ sizeof(VkPipelineColorBlendStateCreateInfo));
// Copy embedded ptrs
pCBCI = pCreateInfo->pColorBlendState;
pPipeline->attachmentCount = pCBCI->attachmentCount;
if (pPipeline->attachmentCount) {
- pPipeline->pAttachments = new VkPipelineColorBlendAttachmentState[pPipeline->attachmentCount];
- bufferSize = pPipeline->attachmentCount * sizeof(VkPipelineColorBlendAttachmentState);
- memcpy((void*)pPipeline->pAttachments, pCBCI->pAttachments, bufferSize);
+ pPipeline->pAttachments =
+ new VkPipelineColorBlendAttachmentState[pPipeline
+ ->attachmentCount];
+ bufferSize = pPipeline->attachmentCount *
+ sizeof(VkPipelineColorBlendAttachmentState);
+ memcpy((void *)pPipeline->pAttachments, pCBCI->pAttachments,
+ bufferSize);
}
pPipeline->graphicsPipelineCI.pColorBlendState = &pPipeline->cbStateCI;
}
if (pCreateInfo->pDynamicState != NULL) {
- memcpy((void*)&pPipeline->dynStateCI, pCreateInfo->pDynamicState, sizeof(VkPipelineDynamicStateCreateInfo));
+ memcpy((void *)&pPipeline->dynStateCI, pCreateInfo->pDynamicState,
+ sizeof(VkPipelineDynamicStateCreateInfo));
if (pPipeline->dynStateCI.dynamicStateCount) {
- pPipeline->dynStateCI.pDynamicStates = new VkDynamicState[pPipeline->dynStateCI.dynamicStateCount];
- bufferSize = pPipeline->dynStateCI.dynamicStateCount * sizeof(VkDynamicState);
- memcpy((void*)pPipeline->dynStateCI.pDynamicStates, pCreateInfo->pDynamicState->pDynamicStates, bufferSize);
+ pPipeline->dynStateCI.pDynamicStates =
+ new VkDynamicState[pPipeline->dynStateCI.dynamicStateCount];
+ bufferSize = pPipeline->dynStateCI.dynamicStateCount *
+ sizeof(VkDynamicState);
+ memcpy((void *)pPipeline->dynStateCI.pDynamicStates,
+ pCreateInfo->pDynamicState->pDynamicStates, bufferSize);
}
pPipeline->graphicsPipelineCI.pDynamicState = &pPipeline->dynStateCI;
}
@@ -1784,25 +2224,25 @@
}
// Free the Pipeline nodes
-static void deletePipelines(layer_data* my_data)
-{
+static void deletePipelines(layer_data *my_data) {
if (my_data->pipelineMap.size() <= 0)
return;
- for (auto ii=my_data->pipelineMap.begin(); ii!=my_data->pipelineMap.end(); ++ii) {
+ for (auto ii = my_data->pipelineMap.begin();
+ ii != my_data->pipelineMap.end(); ++ii) {
if ((*ii).second->graphicsPipelineCI.stageCount != 0) {
- delete[] (*ii).second->graphicsPipelineCI.pStages;
+ delete[](*ii).second->graphicsPipelineCI.pStages;
}
if ((*ii).second->pVertexBindingDescriptions) {
- delete[] (*ii).second->pVertexBindingDescriptions;
+ delete[](*ii).second->pVertexBindingDescriptions;
}
if ((*ii).second->pVertexAttributeDescriptions) {
- delete[] (*ii).second->pVertexAttributeDescriptions;
+ delete[](*ii).second->pVertexAttributeDescriptions;
}
if ((*ii).second->pAttachments) {
- delete[] (*ii).second->pAttachments;
+ delete[](*ii).second->pAttachments;
}
if ((*ii).second->dynStateCI.dynamicStateCount != 0) {
- delete[] (*ii).second->dynStateCI.pDynamicStates;
+ delete[](*ii).second->dynStateCI.pDynamicStates;
}
delete (*ii).second;
}
@@ -1810,56 +2250,77 @@
}
// For given pipeline, return number of MSAA samples, or one if MSAA disabled
-static VkSampleCountFlagBits getNumSamples(layer_data* my_data, const VkPipeline pipeline)
-{
- PIPELINE_NODE* pPipe = my_data->pipelineMap[pipeline];
- if (VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO == pPipe->msStateCI.sType) {
+static VkSampleCountFlagBits getNumSamples(layer_data *my_data,
+ const VkPipeline pipeline) {
+ PIPELINE_NODE *pPipe = my_data->pipelineMap[pipeline];
+ if (VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO ==
+ pPipe->msStateCI.sType) {
return pPipe->msStateCI.rasterizationSamples;
}
return VK_SAMPLE_COUNT_1_BIT;
}
// Validate state related to the PSO
-static VkBool32 validatePipelineState(layer_data* my_data, const GLOBAL_CB_NODE* pCB, const VkPipelineBindPoint pipelineBindPoint, const VkPipeline pipeline)
-{
+static VkBool32
+validatePipelineState(layer_data *my_data, const GLOBAL_CB_NODE *pCB,
+ const VkPipelineBindPoint pipelineBindPoint,
+ const VkPipeline pipeline) {
if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
// Verify that any MSAA request in PSO matches sample# in bound FB
VkSampleCountFlagBits psoNumSamples = getNumSamples(my_data, pipeline);
if (pCB->activeRenderPass) {
- const VkRenderPassCreateInfo* pRPCI = my_data->renderPassMap[pCB->activeRenderPass]->pCreateInfo;
- const VkSubpassDescription* pSD = &pRPCI->pSubpasses[pCB->activeSubpass];
- VkSampleCountFlagBits subpassNumSamples = (VkSampleCountFlagBits) 0;
+ const VkRenderPassCreateInfo *pRPCI =
+ my_data->renderPassMap[pCB->activeRenderPass]->pCreateInfo;
+ const VkSubpassDescription *pSD =
+ &pRPCI->pSubpasses[pCB->activeSubpass];
+ VkSampleCountFlagBits subpassNumSamples = (VkSampleCountFlagBits)0;
uint32_t i;
for (i = 0; i < pSD->colorAttachmentCount; i++) {
VkSampleCountFlagBits samples;
- if (pSD->pColorAttachments[i].attachment == VK_ATTACHMENT_UNUSED)
+ if (pSD->pColorAttachments[i].attachment ==
+ VK_ATTACHMENT_UNUSED)
continue;
- samples = pRPCI->pAttachments[pSD->pColorAttachments[i].attachment].samples;
- if (subpassNumSamples == (VkSampleCountFlagBits) 0) {
+ samples =
+ pRPCI->pAttachments[pSD->pColorAttachments[i].attachment]
+ .samples;
+ if (subpassNumSamples == (VkSampleCountFlagBits)0) {
subpassNumSamples = samples;
} else if (subpassNumSamples != samples) {
- subpassNumSamples = (VkSampleCountFlagBits) -1;
+ subpassNumSamples = (VkSampleCountFlagBits)-1;
break;
}
}
- if (pSD->pDepthStencilAttachment && pSD->pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- const VkSampleCountFlagBits samples = pRPCI->pAttachments[pSD->pDepthStencilAttachment->attachment].samples;
- if (subpassNumSamples == (VkSampleCountFlagBits) 0)
+ if (pSD->pDepthStencilAttachment &&
+ pSD->pDepthStencilAttachment->attachment !=
+ VK_ATTACHMENT_UNUSED) {
+ const VkSampleCountFlagBits samples =
+ pRPCI
+ ->pAttachments[pSD->pDepthStencilAttachment->attachment]
+ .samples;
+ if (subpassNumSamples == (VkSampleCountFlagBits)0)
subpassNumSamples = samples;
else if (subpassNumSamples != samples)
- subpassNumSamples = (VkSampleCountFlagBits) -1;
+ subpassNumSamples = (VkSampleCountFlagBits)-1;
}
if (psoNumSamples != subpassNumSamples) {
- return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, (uint64_t) pipeline, __LINE__, DRAWSTATE_NUM_SAMPLES_MISMATCH, "DS",
- "Num samples mismatch! Binding PSO (%#" PRIxLEAST64 ") with %u samples while current RenderPass (%#" PRIxLEAST64 ") w/ %u samples!",
- (uint64_t) pipeline, psoNumSamples, (uint64_t) pCB->activeRenderPass, subpassNumSamples);
+ return log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+ (uint64_t)pipeline, __LINE__,
+ DRAWSTATE_NUM_SAMPLES_MISMATCH, "DS",
+ "Num samples mismatch! Binding PSO (%#" PRIxLEAST64
+ ") with %u samples while current RenderPass (%#" PRIxLEAST64
+ ") w/ %u samples!",
+ (uint64_t)pipeline, psoNumSamples,
+ (uint64_t)pCB->activeRenderPass, subpassNumSamples);
}
} else {
- // TODO : I believe it's an error if we reach this point and don't have an activeRenderPass
+ // TODO : I believe it's an error if we reach this point and don't
+ // have an activeRenderPass
// Verify and flag error as appropriate
}
// TODO : Add more checks here
@@ -1872,10 +2333,11 @@
// Block of code at start here specifically for managing/tracking DSs
// Return Pool node ptr for specified pool or else NULL
-static DESCRIPTOR_POOL_NODE* getPoolNode(layer_data* my_data, const VkDescriptorPool pool)
-{
+static DESCRIPTOR_POOL_NODE *getPoolNode(layer_data *my_data,
+ const VkDescriptorPool pool) {
loader_platform_thread_lock_mutex(&globalLock);
- if (my_data->descriptorPoolMap.find(pool) == my_data->descriptorPoolMap.end()) {
+ if (my_data->descriptorPoolMap.find(pool) ==
+ my_data->descriptorPoolMap.end()) {
loader_platform_thread_unlock_mutex(&globalLock);
return NULL;
}
@@ -1883,9 +2345,11 @@
return my_data->descriptorPoolMap[pool];
}
-static LAYOUT_NODE* getLayoutNode(layer_data* my_data, const VkDescriptorSetLayout layout) {
+static LAYOUT_NODE *getLayoutNode(layer_data *my_data,
+ const VkDescriptorSetLayout layout) {
loader_platform_thread_lock_mutex(&globalLock);
- if (my_data->descriptorSetLayoutMap.find(layout) == my_data->descriptorSetLayoutMap.end()) {
+ if (my_data->descriptorSetLayoutMap.find(layout) ==
+ my_data->descriptorSetLayoutMap.end()) {
loader_platform_thread_unlock_mutex(&globalLock);
return NULL;
}
@@ -1893,39 +2357,45 @@
return my_data->descriptorSetLayoutMap[layout];
}
-// Return VK_FALSE if update struct is of valid type, otherwise flag error and return code from callback
-static VkBool32 validUpdateStruct(layer_data* my_data, const VkDevice device, const GENERIC_HEADER* pUpdateStruct)
-{
- switch (pUpdateStruct->sType)
- {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- return VK_FALSE;
- default:
- return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
- "Unexpected UPDATE struct of type %s (value %u) in vkUpdateDescriptors() struct tree", string_VkStructureType(pUpdateStruct->sType), pUpdateStruct->sType);
+// Return VK_FALSE if update struct is of valid type, otherwise flag error and
+// return code from callback
+static VkBool32 validUpdateStruct(layer_data *my_data, const VkDevice device,
+ const GENERIC_HEADER *pUpdateStruct) {
+ switch (pUpdateStruct->sType) {
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ return VK_FALSE;
+ default:
+ return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
+ "Unexpected UPDATE struct of type %s (value %u) in "
+ "vkUpdateDescriptors() struct tree",
+ string_VkStructureType(pUpdateStruct->sType),
+ pUpdateStruct->sType);
}
}
// Set count for given update struct in the last parameter
-// Return value of skipCall, which is only VK_TRUE if error occurs and callback signals execution to cease
-static uint32_t getUpdateCount(layer_data* my_data, const VkDevice device, const GENERIC_HEADER* pUpdateStruct)
-{
- switch (pUpdateStruct->sType)
- {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- return ((VkWriteDescriptorSet*)pUpdateStruct)->descriptorCount;
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- // TODO : Need to understand this case better and make sure code is correct
- return ((VkCopyDescriptorSet*)pUpdateStruct)->descriptorCount;
+// Return value of skipCall, which is only VK_TRUE if error occurs and callback
+// signals execution to cease
+static uint32_t getUpdateCount(layer_data *my_data, const VkDevice device,
+ const GENERIC_HEADER *pUpdateStruct) {
+ switch (pUpdateStruct->sType) {
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ return ((VkWriteDescriptorSet *)pUpdateStruct)->descriptorCount;
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ // TODO : Need to understand this case better and make sure code is
+ // correct
+ return ((VkCopyDescriptorSet *)pUpdateStruct)->descriptorCount;
}
- return 0;
+ return 0;
}
// For given Layout Node and binding, return index where that binding begins
-static uint32_t getBindingStartIndex(const LAYOUT_NODE* pLayout, const uint32_t binding)
-{
+static uint32_t getBindingStartIndex(const LAYOUT_NODE *pLayout,
+ const uint32_t binding) {
uint32_t offsetIndex = 0;
for (uint32_t i = 0; i < pLayout->createInfo.bindingCount; i++) {
if (pLayout->createInfo.pBindings[i].binding == binding)
@@ -1936,62 +2406,89 @@
}
// For given layout node and binding, return last index that is updated
-static uint32_t getBindingEndIndex(const LAYOUT_NODE* pLayout, const uint32_t binding)
-{
+static uint32_t getBindingEndIndex(const LAYOUT_NODE *pLayout,
+ const uint32_t binding) {
uint32_t offsetIndex = 0;
- for (uint32_t i = 0; i < pLayout->createInfo.bindingCount; i++) {
+ for (uint32_t i = 0; i < pLayout->createInfo.bindingCount; i++) {
offsetIndex += pLayout->createInfo.pBindings[i].descriptorCount;
if (pLayout->createInfo.pBindings[i].binding == binding)
break;
}
- return offsetIndex-1;
+ return offsetIndex - 1;
}
-// For given layout and update, return the first overall index of the layout that is updated
-static uint32_t getUpdateStartIndex(layer_data* my_data, const VkDevice device, const LAYOUT_NODE* pLayout, const uint32_t binding, const uint32_t arrayIndex, const GENERIC_HEADER* pUpdateStruct)
-{
- return getBindingStartIndex(pLayout, binding)+arrayIndex;
+// For given layout and update, return the first overall index of the layout
+// that is updated
+static uint32_t getUpdateStartIndex(layer_data *my_data, const VkDevice device,
+ const LAYOUT_NODE *pLayout,
+ const uint32_t binding,
+ const uint32_t arrayIndex,
+ const GENERIC_HEADER *pUpdateStruct) {
+ return getBindingStartIndex(pLayout, binding) + arrayIndex;
}
-// For given layout and update, return the last overall index of the layout that is updated
-static uint32_t getUpdateEndIndex(layer_data* my_data, const VkDevice device, const LAYOUT_NODE* pLayout, const uint32_t binding, const uint32_t arrayIndex, const GENERIC_HEADER* pUpdateStruct)
-{
+// For given layout and update, return the last overall index of the layout that
+// is updated
+static uint32_t getUpdateEndIndex(layer_data *my_data, const VkDevice device,
+ const LAYOUT_NODE *pLayout,
+ const uint32_t binding,
+ const uint32_t arrayIndex,
+ const GENERIC_HEADER *pUpdateStruct) {
uint32_t count = getUpdateCount(my_data, device, pUpdateStruct);
- return getBindingStartIndex(pLayout, binding)+arrayIndex+count-1;
+ return getBindingStartIndex(pLayout, binding) + arrayIndex + count - 1;
}
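A worked example (illustrative values, not from this patch) of the index arithmetic above: for a layout whose binding 0 holds 2 descriptors and binding 1 holds 4, getBindingStartIndex(pLayout, 1) returns 2 and getBindingEndIndex(pLayout, 1) returns 5 (2 + 4 - 1). A write update targeting binding 1 with arrayIndex 1 and a descriptorCount of 2 therefore spans overall indices 3 through 4 via getUpdateStartIndex and getUpdateEndIndex.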
-// Verify that the descriptor type in the update struct matches what's expected by the layout
-static VkBool32 validateUpdateConsistency(layer_data* my_data, const VkDevice device, const LAYOUT_NODE* pLayout, const GENERIC_HEADER* pUpdateStruct, uint32_t startIndex, uint32_t endIndex)
-{
+// Verify that the descriptor type in the update struct matches what's expected
+// by the layout
+static VkBool32 validateUpdateConsistency(layer_data *my_data,
+ const VkDevice device,
+ const LAYOUT_NODE *pLayout,
+ const GENERIC_HEADER *pUpdateStruct,
+ uint32_t startIndex,
+ uint32_t endIndex) {
// First get actual type of update
VkBool32 skipCall = VK_FALSE;
VkDescriptorType actualType;
uint32_t i = 0;
- switch (pUpdateStruct->sType)
- {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- actualType = ((VkWriteDescriptorSet*)pUpdateStruct)->descriptorType;
- break;
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- /* no need to validate */
- return VK_FALSE;
- break;
- default:
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
- "Unexpected UPDATE struct of type %s (value %u) in vkUpdateDescriptors() struct tree", string_VkStructureType(pUpdateStruct->sType), pUpdateStruct->sType);
+ switch (pUpdateStruct->sType) {
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ actualType = ((VkWriteDescriptorSet *)pUpdateStruct)->descriptorType;
+ break;
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ /* no need to validate */
+ return VK_FALSE;
+ break;
+ default:
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
+ "Unexpected UPDATE struct of type %s (value %u) in "
+ "vkUpdateDescriptors() struct tree",
+ string_VkStructureType(pUpdateStruct->sType),
+ pUpdateStruct->sType);
}
if (VK_FALSE == skipCall) {
- // Set first stageFlags as reference and verify that all other updates match it
+ // Set first stageFlags as reference and verify that all other updates
+ // match it
VkShaderStageFlags refStageFlags = pLayout->stageFlags[startIndex];
for (i = startIndex; i <= endIndex; i++) {
if (pLayout->descriptorTypes[i] != actualType) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_DESCRIPTOR_TYPE_MISMATCH, "DS",
- "Write descriptor update has descriptor type %s that does not match overlapping binding descriptor type of %s!",
- string_VkDescriptorType(actualType), string_VkDescriptorType(pLayout->descriptorTypes[i]));
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_DESCRIPTOR_TYPE_MISMATCH, "DS",
+ "Write descriptor update has descriptor type %s that does "
+ "not match overlapping binding descriptor type of %s!",
+ string_VkDescriptorType(actualType),
+ string_VkDescriptorType(pLayout->descriptorTypes[i]));
}
if (pLayout->stageFlags[i] != refStageFlags) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_DESCRIPTOR_STAGEFLAGS_MISMATCH, "DS",
- "Write descriptor update has stageFlags %x that do not match overlapping binding descriptor stageFlags of %x!",
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_DESCRIPTOR_STAGEFLAGS_MISMATCH, "DS",
+ "Write descriptor update has stageFlags %x that do not "
+ "match overlapping binding descriptor stageFlags of %x!",
refStageFlags, pLayout->stageFlags[i]);
}
}
@@ -1999,68 +2496,73 @@
return skipCall;
}
-// Determine the update type, allocate a new struct of that type, shadow the given pUpdate
-// struct into the pNewNode param. Return VK_TRUE if error condition encountered and callback signals early exit.
+// Determine the update type, allocate a new struct of that type, shadow the
+// given pUpdate
+// struct into the pNewNode param. Return VK_TRUE if error condition
+// encountered and callback signals early exit.
// NOTE : Calls to this function should be wrapped in mutex
-static VkBool32 shadowUpdateNode(layer_data* my_data, const VkDevice device, GENERIC_HEADER* pUpdate, GENERIC_HEADER** pNewNode)
-{
+static VkBool32 shadowUpdateNode(layer_data *my_data, const VkDevice device,
+ GENERIC_HEADER *pUpdate,
+ GENERIC_HEADER **pNewNode) {
VkBool32 skipCall = VK_FALSE;
- VkWriteDescriptorSet* pWDS = NULL;
- VkCopyDescriptorSet* pCDS = NULL;
+ VkWriteDescriptorSet *pWDS = NULL;
+ VkCopyDescriptorSet *pCDS = NULL;
size_t array_size = 0;
size_t base_array_size = 0;
size_t total_array_size = 0;
size_t baseBuffAddr = 0;
- switch (pUpdate->sType)
- {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- pWDS = new VkWriteDescriptorSet;
- *pNewNode = (GENERIC_HEADER*)pWDS;
- memcpy(pWDS, pUpdate, sizeof(VkWriteDescriptorSet));
+ switch (pUpdate->sType) {
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ pWDS = new VkWriteDescriptorSet;
+ *pNewNode = (GENERIC_HEADER *)pWDS;
+ memcpy(pWDS, pUpdate, sizeof(VkWriteDescriptorSet));
- switch (pWDS->descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- {
- VkDescriptorImageInfo *info = new VkDescriptorImageInfo[pWDS->descriptorCount];
- memcpy(info, pWDS->pImageInfo, pWDS->descriptorCount * sizeof(VkDescriptorImageInfo));
- pWDS->pImageInfo = info;
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- {
- VkBufferView *info = new VkBufferView[pWDS->descriptorCount];
- memcpy(info, pWDS->pTexelBufferView, pWDS->descriptorCount * sizeof(VkBufferView));
- pWDS->pTexelBufferView = info;
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- {
- VkDescriptorBufferInfo *info = new VkDescriptorBufferInfo[pWDS->descriptorCount];
- memcpy(info, pWDS->pBufferInfo, pWDS->descriptorCount * sizeof(VkDescriptorBufferInfo));
- pWDS->pBufferInfo = info;
- }
- break;
- default:
- return VK_ERROR_VALIDATION_FAILED_EXT;
- break;
- }
- break;
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- pCDS = new VkCopyDescriptorSet;
- *pNewNode = (GENERIC_HEADER*)pCDS;
- memcpy(pCDS, pUpdate, sizeof(VkCopyDescriptorSet));
- break;
+ switch (pWDS->descriptorType) {
+ case VK_DESCRIPTOR_TYPE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+ VkDescriptorImageInfo *info =
+ new VkDescriptorImageInfo[pWDS->descriptorCount];
+ memcpy(info, pWDS->pImageInfo,
+ pWDS->descriptorCount * sizeof(VkDescriptorImageInfo));
+ pWDS->pImageInfo = info;
+ } break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+ VkBufferView *info = new VkBufferView[pWDS->descriptorCount];
+ memcpy(info, pWDS->pTexelBufferView,
+ pWDS->descriptorCount * sizeof(VkBufferView));
+ pWDS->pTexelBufferView = info;
+ } break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ VkDescriptorBufferInfo *info =
+ new VkDescriptorBufferInfo[pWDS->descriptorCount];
+ memcpy(info, pWDS->pBufferInfo,
+ pWDS->descriptorCount * sizeof(VkDescriptorBufferInfo));
+ pWDS->pBufferInfo = info;
+ } break;
default:
- if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
- "Unexpected UPDATE struct of type %s (value %u) in vkUpdateDescriptors() struct tree", string_VkStructureType(pUpdate->sType), pUpdate->sType))
- return VK_TRUE;
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ break;
+ }
+ break;
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ pCDS = new VkCopyDescriptorSet;
+ *pNewNode = (GENERIC_HEADER *)pCDS;
+ memcpy(pCDS, pUpdate, sizeof(VkCopyDescriptorSet));
+ break;
+ default:
+ if (log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
+ "Unexpected UPDATE struct of type %s (value %u) in "
+ "vkUpdateDescriptors() struct tree",
+ string_VkStructureType(pUpdate->sType), pUpdate->sType))
+ return VK_TRUE;
}
// Make sure that pNext for the end of shadow copy is NULL
(*pNewNode)->pNext = NULL;
@@ -2068,17 +2570,28 @@
}
// Verify that given sampler is valid
-static VkBool32 validateSampler(const layer_data* my_data, const VkSampler* pSampler, const VkBool32 immutable)
-{
+static VkBool32 validateSampler(const layer_data *my_data,
+ const VkSampler *pSampler,
+ const VkBool32 immutable) {
VkBool32 skipCall = VK_FALSE;
auto sampIt = my_data->sampleMap.find(*pSampler);
if (sampIt == my_data->sampleMap.end()) {
if (!immutable) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, (uint64_t) *pSampler, __LINE__, DRAWSTATE_SAMPLER_DESCRIPTOR_ERROR, "DS",
- "vkUpdateDescriptorSets: Attempt to update descriptor with invalid sampler %#" PRIxLEAST64, (uint64_t) *pSampler);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, (uint64_t)*pSampler,
+ __LINE__, DRAWSTATE_SAMPLER_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Attempt to update descriptor with "
+ "invalid sampler %#" PRIxLEAST64,
+ (uint64_t)*pSampler);
} else { // immutable
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, (uint64_t) *pSampler, __LINE__, DRAWSTATE_SAMPLER_DESCRIPTOR_ERROR, "DS",
- "vkUpdateDescriptorSets: Attempt to update descriptor whose binding has an invalid immutable sampler %#" PRIxLEAST64, (uint64_t) *pSampler);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, (uint64_t)*pSampler,
+ __LINE__, DRAWSTATE_SAMPLER_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Attempt to update descriptor whose "
+ "binding has an invalid immutable sampler %#" PRIxLEAST64,
+ (uint64_t)*pSampler);
}
} else {
// TODO : Any further checks we want to do on the sampler?
@@ -2087,66 +2600,116 @@
}
// Verify that given imageView is valid
-static VkBool32 validateImageView(const layer_data* my_data, const VkImageView* pImageView, const VkImageLayout imageLayout)
-{
+static VkBool32 validateImageView(const layer_data *my_data,
+ const VkImageView *pImageView,
+ const VkImageLayout imageLayout) {
VkBool32 skipCall = VK_FALSE;
auto ivIt = my_data->imageViewMap.find(*pImageView);
if (ivIt == my_data->imageViewMap.end()) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, (uint64_t) *pImageView, __LINE__, DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS",
- "vkUpdateDescriptorSets: Attempt to update descriptor with invalid imageView %#" PRIxLEAST64, (uint64_t) *pImageView);
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ (uint64_t)*pImageView, __LINE__,
+ DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Attempt to update "
+ "descriptor with invalid imageView %#" PRIxLEAST64,
+ (uint64_t)*pImageView);
} else {
- // Validate that imageLayout is compatible with aspectMask and image format
- VkImageAspectFlags aspectMask = ivIt->second->subresourceRange.aspectMask;
+ // Validate that imageLayout is compatible with aspectMask and image
+ // format
+ VkImageAspectFlags aspectMask =
+ ivIt->second->subresourceRange.aspectMask;
VkImage image = ivIt->second->image;
// TODO : Check here in case we have a bad image
auto imgIt = my_data->imageLayoutMap.find(image);
if (imgIt == my_data->imageLayoutMap.end()) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t) image, __LINE__, DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS",
- "vkUpdateDescriptorSets: Attempt to update descriptor with invalid image %#" PRIxLEAST64 " in imageView %#" PRIxLEAST64, (uint64_t) image, (uint64_t) *pImageView);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)image,
+ __LINE__, DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Attempt to update descriptor with "
+ "invalid image %#" PRIxLEAST64 " in imageView %#" PRIxLEAST64,
+ (uint64_t)image, (uint64_t)*pImageView);
} else {
VkFormat format = (*imgIt).second->format;
VkBool32 ds = vk_format_is_depth_or_stencil(format);
switch (imageLayout) {
- case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
- // Only Color bit must be set
- if ((aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, (uint64_t) *pImageView, __LINE__,
- DRAWSTATE_INVALID_IMAGE_ASPECT, "DS", "vkUpdateDescriptorSets: Updating descriptor with layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL and imageView %#" PRIxLEAST64 ""
- " that does not have VK_IMAGE_ASPECT_COLOR_BIT set.", (uint64_t) *pImageView);
+ case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+ // Only Color bit must be set
+ if ((aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) !=
+ VK_IMAGE_ASPECT_COLOR_BIT) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ (uint64_t)*pImageView, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_ASPECT, "DS",
+ "vkUpdateDescriptorSets: Updating descriptor with "
+ "layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL and "
+ "imageView %#" PRIxLEAST64 ""
+ " that does not have VK_IMAGE_ASPECT_COLOR_BIT set.",
+ (uint64_t)*pImageView);
+ }
+ // format must NOT be DS
+ if (ds) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ (uint64_t)*pImageView, __LINE__,
+ DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Updating descriptor with "
+ "layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL and "
+ "imageView %#" PRIxLEAST64 ""
+ " but the image format is %s which is not a color "
+ "format.",
+ (uint64_t)*pImageView, string_VkFormat(format));
+ }
+ break;
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+ // Depth or stencil bit must be set, but both must NOT be set
+ if (aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+ if (aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+ // both must NOT be set
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ (uint64_t)*pImageView, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_ASPECT, "DS",
+ "vkUpdateDescriptorSets: Updating descriptor with "
+ "imageView %#" PRIxLEAST64 ""
+ " that has both STENCIL and DEPTH aspects set",
+ (uint64_t)*pImageView);
}
- // format must NOT be DS
- if (ds) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, (uint64_t) *pImageView, __LINE__,
- DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS", "vkUpdateDescriptorSets: Updating descriptor with layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL and imageView %#" PRIxLEAST64 ""
- " but the image format is %s which is not a color format.", (uint64_t) *pImageView, string_VkFormat(format));
- }
- break;
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
- // Depth or stencil bit must be set, but both must NOT be set
- if (aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) {
- if (aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) {
- // both must NOT be set
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, (uint64_t) *pImageView, __LINE__,
- DRAWSTATE_INVALID_IMAGE_ASPECT, "DS", "vkUpdateDescriptorSets: Updating descriptor with imageView %#" PRIxLEAST64 ""
- " that has both STENCIL and DEPTH aspects set", (uint64_t) *pImageView);
- }
- } else if (!(aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
- // Neither were set
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, (uint64_t) *pImageView, __LINE__,
- DRAWSTATE_INVALID_IMAGE_ASPECT, "DS", "vkUpdateDescriptorSets: Updating descriptor with layout %s and imageView %#" PRIxLEAST64 ""
- " that does not have STENCIL or DEPTH aspect set.", string_VkImageLayout(imageLayout), (uint64_t) *pImageView);
- }
- // format must be DS
- if (!ds) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, (uint64_t) *pImageView, __LINE__,
- DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS", "vkUpdateDescriptorSets: Updating descriptor with layout %s and imageView %#" PRIxLEAST64 ""
- " but the image format is %s which is not a depth/stencil format.", string_VkImageLayout(imageLayout), (uint64_t) *pImageView, string_VkFormat(format));
- }
- break;
- default:
- // anything to check for other layouts?
- break;
+ } else if (!(aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
+ // Neither were set
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ (uint64_t)*pImageView, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_ASPECT, "DS",
+ "vkUpdateDescriptorSets: Updating descriptor with "
+ "layout %s and imageView %#" PRIxLEAST64 ""
+ " that does not have STENCIL or DEPTH aspect set.",
+ string_VkImageLayout(imageLayout),
+ (uint64_t)*pImageView);
+ }
+ // format must be DS
+ if (!ds) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+ (uint64_t)*pImageView, __LINE__,
+ DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Updating descriptor with "
+ "layout %s and imageView %#" PRIxLEAST64 ""
+ " but the image format is %s which is not a "
+ "depth/stencil format.",
+ string_VkImageLayout(imageLayout),
+ (uint64_t)*pImageView, string_VkFormat(format));
+ }
+ break;
+ default:
+ // anything to check for other layouts?
+ break;
}
}
}
@@ -2154,13 +2717,18 @@
}
// Verify that given bufferView is valid
-static VkBool32 validateBufferView(const layer_data* my_data, const VkBufferView* pBufferView)
-{
+static VkBool32 validateBufferView(const layer_data *my_data,
+ const VkBufferView *pBufferView) {
VkBool32 skipCall = VK_FALSE;
auto sampIt = my_data->bufferViewMap.find(*pBufferView);
if (sampIt == my_data->bufferViewMap.end()) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, (uint64_t) *pBufferView, __LINE__, DRAWSTATE_BUFFERVIEW_DESCRIPTOR_ERROR, "DS",
- "vkUpdateDescriptorSets: Attempt to update descriptor with invalid bufferView %#" PRIxLEAST64, (uint64_t) *pBufferView);
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+ (uint64_t)*pBufferView, __LINE__,
+ DRAWSTATE_BUFFERVIEW_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Attempt to update "
+ "descriptor with invalid bufferView %#" PRIxLEAST64,
+ (uint64_t)*pBufferView);
} else {
// TODO : Any further checks we want to do on the bufferView?
}
@@ -2168,104 +2736,145 @@
}
// Verify that given bufferInfo is valid
-static VkBool32 validateBufferInfo(const layer_data* my_data, const VkDescriptorBufferInfo* pBufferInfo)
-{
+static VkBool32 validateBufferInfo(const layer_data *my_data,
+ const VkDescriptorBufferInfo *pBufferInfo) {
VkBool32 skipCall = VK_FALSE;
auto sampIt = my_data->bufferMap.find(pBufferInfo->buffer);
if (sampIt == my_data->bufferMap.end()) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, (uint64_t) pBufferInfo->buffer, __LINE__, DRAWSTATE_BUFFERINFO_DESCRIPTOR_ERROR, "DS",
- "vkUpdateDescriptorSets: Attempt to update descriptor where bufferInfo has invalid buffer %#" PRIxLEAST64, (uint64_t) pBufferInfo->buffer);
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ (uint64_t)pBufferInfo->buffer, __LINE__,
+ DRAWSTATE_BUFFERINFO_DESCRIPTOR_ERROR, "DS",
+ "vkUpdateDescriptorSets: Attempt to update descriptor "
+ "where bufferInfo has invalid buffer %#" PRIxLEAST64,
+ (uint64_t)pBufferInfo->buffer);
} else {
// TODO : Any further checks we want to do on the bufferView?
}
return skipCall;
}
-static VkBool32 validateUpdateContents(const layer_data* my_data, const VkWriteDescriptorSet *pWDS, const VkDescriptorSetLayoutBinding* pLayoutBinding)
-{
+static VkBool32
+validateUpdateContents(const layer_data *my_data,
+ const VkWriteDescriptorSet *pWDS,
+ const VkDescriptorSetLayoutBinding *pLayoutBinding) {
VkBool32 skipCall = VK_FALSE;
- // First verify that for the given Descriptor type, the correct DescriptorInfo data is supplied
- VkBufferView* pBufferView = NULL;
- const VkSampler* pSampler = NULL;
- VkImageView* pImageView = NULL;
- VkImageLayout* pImageLayout = NULL;
- VkDescriptorBufferInfo* pBufferInfo = NULL;
+ // First verify that for the given Descriptor type, the correct
+ // DescriptorInfo data is supplied
+ VkBufferView *pBufferView = NULL;
+ const VkSampler *pSampler = NULL;
+ VkImageView *pImageView = NULL;
+ VkImageLayout *pImageLayout = NULL;
+ VkDescriptorBufferInfo *pBufferInfo = NULL;
VkBool32 immutable = VK_FALSE;
uint32_t i = 0;
// For given update type, verify that update contents are correct
switch (pWDS->descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- for (i=0; i<pWDS->descriptorCount; ++i) {
- skipCall |= validateSampler(my_data, &(pWDS->pImageInfo[i].sampler), immutable);
- }
- break;
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- for (i=0; i<pWDS->descriptorCount; ++i) {
- if (NULL == pLayoutBinding->pImmutableSamplers) {
- pSampler = &(pWDS->pImageInfo[i].sampler);
- if (immutable) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, (uint64_t) *pSampler, __LINE__, DRAWSTATE_INCONSISTENT_IMMUTABLE_SAMPLER_UPDATE, "DS",
- "vkUpdateDescriptorSets: Update #%u is not an immutable sampler %#" PRIxLEAST64 ", but previous update(s) from this "
- "VkWriteDescriptorSet struct used an immutable sampler. All updates from a single struct must either "
- "use immutable or non-immutable samplers.", i, (uint64_t) *pSampler);
- }
- } else {
- if (i>0 && !immutable) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, (uint64_t) *pSampler, __LINE__, DRAWSTATE_INCONSISTENT_IMMUTABLE_SAMPLER_UPDATE, "DS",
- "vkUpdateDescriptorSets: Update #%u is an immutable sampler, but previous update(s) from this "
- "VkWriteDescriptorSet struct used a non-immutable sampler. All updates from a single struct must either "
- "use immutable or non-immutable samplers.", i);
- }
- immutable = VK_TRUE;
- pSampler = &(pLayoutBinding->pImmutableSamplers[i]);
+ case VK_DESCRIPTOR_TYPE_SAMPLER:
+ for (i = 0; i < pWDS->descriptorCount; ++i) {
+ skipCall |= validateSampler(my_data, &(pWDS->pImageInfo[i].sampler),
+ immutable);
+ }
+ break;
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+ for (i = 0; i < pWDS->descriptorCount; ++i) {
+ if (NULL == pLayoutBinding->pImmutableSamplers) {
+ pSampler = &(pWDS->pImageInfo[i].sampler);
+ if (immutable) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+ (uint64_t)*pSampler, __LINE__,
+ DRAWSTATE_INCONSISTENT_IMMUTABLE_SAMPLER_UPDATE, "DS",
+ "vkUpdateDescriptorSets: Update #%u is not an "
+ "immutable sampler %#" PRIxLEAST64
+ ", but previous update(s) from this "
+ "VkWriteDescriptorSet struct used an immutable "
+ "sampler. All updates from a single struct must either "
+ "use immutable or non-immutable samplers.",
+ i, (uint64_t)*pSampler);
}
- skipCall |= validateSampler(my_data, pSampler, immutable);
+ } else {
+ if (i > 0 && !immutable) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+ (uint64_t)*pSampler, __LINE__,
+ DRAWSTATE_INCONSISTENT_IMMUTABLE_SAMPLER_UPDATE, "DS",
+ "vkUpdateDescriptorSets: Update #%u is an immutable "
+ "sampler, but previous update(s) from this "
+ "VkWriteDescriptorSet struct used a non-immutable "
+ "sampler. All updates from a single struct must either "
+ "use immutable or non-immutable samplers.",
+ i);
+ }
+ immutable = VK_TRUE;
+ pSampler = &(pLayoutBinding->pImmutableSamplers[i]);
}
- // Intentionally fall through here to also validate image stuff
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- for (i=0; i<pWDS->descriptorCount; ++i) {
- skipCall |= validateImageView(my_data, &(pWDS->pImageInfo[i].imageView), pWDS->pImageInfo[i].imageLayout);
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- for (i=0; i<pWDS->descriptorCount; ++i) {
- skipCall |= validateBufferView(my_data, &(pWDS->pTexelBufferView[i]));
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- for (i=0; i<pWDS->descriptorCount; ++i) {
- skipCall |= validateBufferInfo(my_data, &(pWDS->pBufferInfo[i]));
- }
- break;
+ skipCall |= validateSampler(my_data, pSampler, immutable);
+ }
+ // Intentionally fall through here to also validate image stuff
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+ for (i = 0; i < pWDS->descriptorCount; ++i) {
+ skipCall |=
+ validateImageView(my_data, &(pWDS->pImageInfo[i].imageView),
+ pWDS->pImageInfo[i].imageLayout);
+ }
+ break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ for (i = 0; i < pWDS->descriptorCount; ++i) {
+ skipCall |=
+ validateBufferView(my_data, &(pWDS->pTexelBufferView[i]));
+ }
+ break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+ for (i = 0; i < pWDS->descriptorCount; ++i) {
+ skipCall |= validateBufferInfo(my_data, &(pWDS->pBufferInfo[i]));
+ }
+ break;
}
return skipCall;
}
-// Validate that given set is valid and that it's not being used by an in-flight CmdBuffer
+// Validate that given set is valid and that it's not being used by an in-flight
+// CmdBuffer
// func_str is the name of the calling function
// Return VK_FALSE if no errors occur
-// Return VK_TRUE if validation error occurs and callback returns VK_TRUE (to skip upcoming API call down the chain)
-VkBool32 validateIdleDescriptorSet(const layer_data* my_data, VkDescriptorSet set, std::string func_str) {
+// Return VK_TRUE if validation error occurs and callback returns VK_TRUE (to
+// skip upcoming API call down the chain)
+VkBool32 validateIdleDescriptorSet(const layer_data *my_data,
+ VkDescriptorSet set, std::string func_str) {
VkBool32 skip_call = VK_FALSE;
auto set_node = my_data->setMap.find(set);
if (set_node == my_data->setMap.end()) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(set), __LINE__, DRAWSTATE_DOUBLE_DESTROY, "DS",
- "Cannot call %s() on descriptor set %" PRIxLEAST64 " that has not been allocated.", func_str.c_str(), (uint64_t)(set));
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)(set), __LINE__, DRAWSTATE_DOUBLE_DESTROY, "DS",
+ "Cannot call %s() on descriptor set %" PRIxLEAST64
+ " that has not been allocated.",
+ func_str.c_str(), (uint64_t)(set));
} else {
if (set_node->second->in_use.load()) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(set), __LINE__, DRAWSTATE_OBJECT_INUSE, "DS",
- "Cannot call %s() on descriptor set %" PRIxLEAST64 " that is in use by a command buffer.", func_str.c_str(), (uint64_t)(set));
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)(set), __LINE__, DRAWSTATE_OBJECT_INUSE, "DS",
+ "Cannot call %s() on descriptor set %" PRIxLEAST64
+ " that is in use by a command buffer.",
+ func_str.c_str(), (uint64_t)(set));
}
}
return skip_call;
}
-static void invalidateBoundCmdBuffers(layer_data* dev_data, const SET_NODE* pSet)
-{
+static void invalidateBoundCmdBuffers(layer_data *dev_data,
+ const SET_NODE *pSet) {
// Flag any CBs this set is bound to as INVALID
for (auto cb : pSet->boundCmdBuffers) {
auto cb_node = dev_data->commandBufferMap.find(cb);
@@ -2275,64 +2884,104 @@
}
}
// update DS mappings based on write and copy update arrays
-static VkBool32 dsUpdate(layer_data* my_data, VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pWDS, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pCDS)
-{
+static VkBool32 dsUpdate(layer_data *my_data, VkDevice device,
+ uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet *pWDS,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet *pCDS) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
- LAYOUT_NODE* pLayout = NULL;
- VkDescriptorSetLayoutCreateInfo* pLayoutCI = NULL;
+ LAYOUT_NODE *pLayout = NULL;
+ VkDescriptorSetLayoutCreateInfo *pLayoutCI = NULL;
// Validate Write updates
uint32_t i = 0;
- for (i=0; i < descriptorWriteCount; i++) {
+ for (i = 0; i < descriptorWriteCount; i++) {
VkDescriptorSet ds = pWDS[i].dstSet;
- SET_NODE* pSet = my_data->setMap[ds];
+ SET_NODE *pSet = my_data->setMap[ds];
// Set being updated cannot be in-flight
- if ((skipCall = validateIdleDescriptorSet(my_data, ds, "VkUpdateDescriptorSets")) == VK_TRUE)
+ if ((skipCall = validateIdleDescriptorSet(
+ my_data, ds, "VkUpdateDescriptorSets")) == VK_TRUE)
return skipCall;
// If set is bound to any cmdBuffers, mark them invalid
invalidateBoundCmdBuffers(my_data, pSet);
- GENERIC_HEADER* pUpdate = (GENERIC_HEADER*) &pWDS[i];
+ GENERIC_HEADER *pUpdate = (GENERIC_HEADER *)&pWDS[i];
pLayout = pSet->pLayout;
// First verify valid update struct
- if ((skipCall = validUpdateStruct(my_data, device, pUpdate)) == VK_TRUE) {
+ if ((skipCall = validUpdateStruct(my_data, device, pUpdate)) ==
+ VK_TRUE) {
break;
}
uint32_t binding = 0, endIndex = 0;
binding = pWDS[i].dstBinding;
// Make sure that layout being updated has the binding being updated
if (pLayout->bindings.find(binding) == pLayout->bindings.end()) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(ds), __LINE__, DRAWSTATE_INVALID_UPDATE_INDEX, "DS",
- "Descriptor Set %" PRIu64 " does not have binding to match update binding %u for update type %s!", (uint64_t)(ds), binding, string_VkStructureType(pUpdate->sType));
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(ds),
+ __LINE__, DRAWSTATE_INVALID_UPDATE_INDEX, "DS",
+ "Descriptor Set %" PRIu64 " does not have binding to match "
+ "update binding %u for update type "
+ "%s!",
+ (uint64_t)(ds), binding,
+ string_VkStructureType(pUpdate->sType));
} else {
// Next verify that update falls within size of given binding
- endIndex = getUpdateEndIndex(my_data, device, pLayout, binding, pWDS[i].dstArrayElement, pUpdate);
+ endIndex = getUpdateEndIndex(my_data, device, pLayout, binding,
+ pWDS[i].dstArrayElement, pUpdate);
if (getBindingEndIndex(pLayout, binding) < endIndex) {
pLayoutCI = &pLayout->createInfo;
- string DSstr = vk_print_vkdescriptorsetlayoutcreateinfo(pLayoutCI, "{DS} ");
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(ds), __LINE__, DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, "DS",
- "Descriptor update type of %s is out of bounds for matching binding %u in Layout w/ CI:\n%s!", string_VkStructureType(pUpdate->sType), binding, DSstr.c_str());
- } else { // TODO : should we skip update on a type mismatch or force it?
+ string DSstr = vk_print_vkdescriptorsetlayoutcreateinfo(
+ pLayoutCI, "{DS} ");
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)(ds), __LINE__,
+ DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, "DS",
+ "Descriptor update type of %s is out of bounds for "
+ "matching binding %u in Layout w/ CI:\n%s!",
+ string_VkStructureType(pUpdate->sType), binding,
+ DSstr.c_str());
+ } else { // TODO : should we skip update on a type mismatch or force
+ // it?
uint32_t startIndex;
- startIndex = getUpdateStartIndex(my_data, device, pLayout, binding, pWDS[i].dstArrayElement, pUpdate);
- // Layout bindings match w/ update, now verify that update type & stageFlags are the same for entire update
- if ((skipCall = validateUpdateConsistency(my_data, device, pLayout, pUpdate, startIndex, endIndex)) == VK_FALSE) {
- // The update is within bounds and consistent, but need to make sure contents make sense as well
- if ((skipCall = validateUpdateContents(my_data, &pWDS[i], &pLayout->createInfo.pBindings[binding])) == VK_FALSE) {
+ startIndex =
+ getUpdateStartIndex(my_data, device, pLayout, binding,
+ pWDS[i].dstArrayElement, pUpdate);
+ // Layout bindings match w/ update, now verify that update type
+ // & stageFlags are the same for entire update
+ if ((skipCall = validateUpdateConsistency(
+ my_data, device, pLayout, pUpdate, startIndex,
+ endIndex)) == VK_FALSE) {
+ // The update is within bounds and consistent, but need to
+ // make sure contents make sense as well
+ if ((skipCall = validateUpdateContents(
+ my_data, &pWDS[i],
+ &pLayout->createInfo.pBindings[binding])) ==
+ VK_FALSE) {
// Update is good. Save the update info
// Create new update struct for this set's shadow copy
- GENERIC_HEADER* pNewNode = NULL;
- skipCall |= shadowUpdateNode(my_data, device, pUpdate, &pNewNode);
+ GENERIC_HEADER *pNewNode = NULL;
+ skipCall |= shadowUpdateNode(my_data, device, pUpdate,
+ &pNewNode);
if (NULL == pNewNode) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(ds), __LINE__, DRAWSTATE_OUT_OF_MEMORY, "DS",
- "Out of memory while attempting to allocate UPDATE struct in vkUpdateDescriptors()");
+ skipCall |= log_msg(
+ my_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)(ds), __LINE__,
+ DRAWSTATE_OUT_OF_MEMORY, "DS",
+ "Out of memory while attempting to allocate "
+ "UPDATE struct in vkUpdateDescriptors()");
} else {
- // Insert shadow node into LL of updates for this set
+ // Insert shadow node into LL of updates for this
+ // set
pNewNode->pNext = pSet->pUpdateStructs;
pSet->pUpdateStructs = pNewNode;
- // Now update appropriate descriptor(s) to point to new Update node
+ // Now update appropriate descriptor(s) to point to
+ // new Update node
for (uint32_t j = startIndex; j <= endIndex; j++) {
- assert(j<pSet->descriptorCount);
+ assert(j < pSet->descriptorCount);
pSet->ppDescriptors[j] = pNewNode;
}
}
@@ -2342,56 +2991,117 @@
}
}
// Now validate copy updates
- for (i=0; i < descriptorCopyCount; ++i) {
+ for (i = 0; i < descriptorCopyCount; ++i) {
SET_NODE *pSrcSet = NULL, *pDstSet = NULL;
LAYOUT_NODE *pSrcLayout = NULL, *pDstLayout = NULL;
- uint32_t srcStartIndex = 0, srcEndIndex = 0, dstStartIndex = 0, dstEndIndex = 0;
- // For each copy make sure that update falls within given layout and that types match
+ uint32_t srcStartIndex = 0, srcEndIndex = 0, dstStartIndex = 0,
+ dstEndIndex = 0;
+ // For each copy make sure that update falls within given layout and
+ // that types match
pSrcSet = my_data->setMap[pCDS[i].srcSet];
pDstSet = my_data->setMap[pCDS[i].dstSet];
// Set being updated cannot be in-flight
- if ((skipCall = validateIdleDescriptorSet(my_data, pDstSet->set, "VkUpdateDescriptorSets")) == VK_TRUE)
+ if ((skipCall = validateIdleDescriptorSet(
+ my_data, pDstSet->set, "VkUpdateDescriptorSets")) == VK_TRUE)
return skipCall;
invalidateBoundCmdBuffers(my_data, pDstSet);
pSrcLayout = pSrcSet->pLayout;
pDstLayout = pDstSet->pLayout;
// Validate that src binding is valid for src set layout
- if (pSrcLayout->bindings.find(pCDS[i].srcBinding) == pSrcLayout->bindings.end()) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pSrcSet->set, __LINE__, DRAWSTATE_INVALID_UPDATE_INDEX, "DS",
- "Copy descriptor update %u has srcBinding %u which is out of bounds for underlying SetLayout %#" PRIxLEAST64 " which only has bindings 0-%u.",
- i, pCDS[i].srcBinding, (uint64_t) pSrcLayout->layout, pSrcLayout->createInfo.bindingCount-1);
- } else if (pDstLayout->bindings.find(pCDS[i].dstBinding) == pDstLayout->bindings.end()) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDstSet->set, __LINE__, DRAWSTATE_INVALID_UPDATE_INDEX, "DS",
- "Copy descriptor update %u has dstBinding %u which is out of bounds for underlying SetLayout %#" PRIxLEAST64 " which only has bindings 0-%u.",
- i, pCDS[i].dstBinding, (uint64_t) pDstLayout->layout, pDstLayout->createInfo.bindingCount-1);
+ if (pSrcLayout->bindings.find(pCDS[i].srcBinding) ==
+ pSrcLayout->bindings.end()) {
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pSrcSet->set, __LINE__,
+ DRAWSTATE_INVALID_UPDATE_INDEX,
+ "DS", "Copy descriptor update %u has srcBinding %u "
+ "which is out of bounds for underlying SetLayout "
+ "%#" PRIxLEAST64 " which only has bindings 0-%u.",
+ i, pCDS[i].srcBinding, (uint64_t)pSrcLayout->layout,
+ pSrcLayout->createInfo.bindingCount - 1);
+ } else if (pDstLayout->bindings.find(pCDS[i].dstBinding) ==
+ pDstLayout->bindings.end()) {
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDstSet->set, __LINE__,
+ DRAWSTATE_INVALID_UPDATE_INDEX,
+ "DS", "Copy descriptor update %u has dstBinding %u "
+ "which is out of bounds for underlying SetLayout "
+ "%#" PRIxLEAST64 " which only has bindings 0-%u.",
+ i, pCDS[i].dstBinding, (uint64_t)pDstLayout->layout,
+ pDstLayout->createInfo.bindingCount - 1);
} else {
- // Proceed with validation. Bindings are ok, but make sure update is within bounds of given layout
- srcEndIndex = getUpdateEndIndex(my_data, device, pSrcLayout, pCDS[i].srcBinding, pCDS[i].srcArrayElement, (const GENERIC_HEADER*)&(pCDS[i]));
- dstEndIndex = getUpdateEndIndex(my_data, device, pDstLayout, pCDS[i].dstBinding, pCDS[i].dstArrayElement, (const GENERIC_HEADER*)&(pCDS[i]));
- if (getBindingEndIndex(pSrcLayout, pCDS[i].srcBinding) < srcEndIndex) {
+ // Proceed with validation. Bindings are ok, but make sure update is
+ // within bounds of given layout
+ srcEndIndex = getUpdateEndIndex(
+ my_data, device, pSrcLayout, pCDS[i].srcBinding,
+ pCDS[i].srcArrayElement, (const GENERIC_HEADER *)&(pCDS[i]));
+ dstEndIndex = getUpdateEndIndex(
+ my_data, device, pDstLayout, pCDS[i].dstBinding,
+ pCDS[i].dstArrayElement, (const GENERIC_HEADER *)&(pCDS[i]));
+ if (getBindingEndIndex(pSrcLayout, pCDS[i].srcBinding) <
+ srcEndIndex) {
pLayoutCI = &pSrcLayout->createInfo;
- string DSstr = vk_print_vkdescriptorsetlayoutcreateinfo(pLayoutCI, "{DS} ");
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pSrcSet->set, __LINE__, DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, "DS",
- "Copy descriptor src update is out of bounds for matching binding %u in Layout w/ CI:\n%s!", pCDS[i].srcBinding, DSstr.c_str());
- } else if (getBindingEndIndex(pDstLayout, pCDS[i].dstBinding) < dstEndIndex) {
+ string DSstr = vk_print_vkdescriptorsetlayoutcreateinfo(
+ pLayoutCI, "{DS} ");
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pSrcSet->set, __LINE__,
+ DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, "DS",
+ "Copy descriptor src update is out of bounds for "
+ "matching binding %u in Layout w/ CI:\n%s!",
+ pCDS[i].srcBinding, DSstr.c_str());
+ } else if (getBindingEndIndex(pDstLayout, pCDS[i].dstBinding) <
+ dstEndIndex) {
pLayoutCI = &pDstLayout->createInfo;
- string DSstr = vk_print_vkdescriptorsetlayoutcreateinfo(pLayoutCI, "{DS} ");
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDstSet->set, __LINE__, DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, "DS",
- "Copy descriptor dest update is out of bounds for matching binding %u in Layout w/ CI:\n%s!", pCDS[i].dstBinding, DSstr.c_str());
+ string DSstr = vk_print_vkdescriptorsetlayoutcreateinfo(
+ pLayoutCI, "{DS} ");
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDstSet->set, __LINE__,
+ DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, "DS",
+ "Copy descriptor dest update is out of bounds for "
+ "matching binding %u in Layout w/ CI:\n%s!",
+ pCDS[i].dstBinding, DSstr.c_str());
} else {
- srcStartIndex = getUpdateStartIndex(my_data, device, pSrcLayout, pCDS[i].srcBinding, pCDS[i].srcArrayElement, (const GENERIC_HEADER*)&(pCDS[i]));
- dstStartIndex = getUpdateStartIndex(my_data, device, pDstLayout, pCDS[i].dstBinding, pCDS[i].dstArrayElement, (const GENERIC_HEADER*)&(pCDS[i]));
- for (uint32_t j=0; j<pCDS[i].descriptorCount; ++j) {
- // For copy just make sure that the types match and then perform the update
- if (pSrcLayout->descriptorTypes[srcStartIndex+j] != pDstLayout->descriptorTypes[dstStartIndex+j]) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_DESCRIPTOR_TYPE_MISMATCH, "DS",
- "Copy descriptor update index %u, update count #%u, has src update descriptor type %s that does not match overlapping dest descriptor type of %s!",
- i, j+1, string_VkDescriptorType(pSrcLayout->descriptorTypes[srcStartIndex+j]), string_VkDescriptorType(pDstLayout->descriptorTypes[dstStartIndex+j]));
+ srcStartIndex = getUpdateStartIndex(
+ my_data, device, pSrcLayout, pCDS[i].srcBinding,
+ pCDS[i].srcArrayElement,
+ (const GENERIC_HEADER *)&(pCDS[i]));
+ dstStartIndex = getUpdateStartIndex(
+ my_data, device, pDstLayout, pCDS[i].dstBinding,
+ pCDS[i].dstArrayElement,
+ (const GENERIC_HEADER *)&(pCDS[i]));
+ for (uint32_t j = 0; j < pCDS[i].descriptorCount; ++j) {
+ // For copy just make sure that the types match and then
+ // perform the update
+ if (pSrcLayout->descriptorTypes[srcStartIndex + j] !=
+ pDstLayout->descriptorTypes[dstStartIndex + j]) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_DESCRIPTOR_TYPE_MISMATCH, "DS",
+ "Copy descriptor update index %u, update count "
+ "#%u, has src update descriptor type %s that does "
+ "not match overlapping dest descriptor type of %s!",
+ i, j + 1,
+ string_VkDescriptorType(
+ pSrcLayout->descriptorTypes[srcStartIndex + j]),
+ string_VkDescriptorType(
+ pDstLayout
+ ->descriptorTypes[dstStartIndex + j]));
} else {
// point dst descriptor at corresponding src descriptor
- // TODO : This may be a hole. I believe copy should be its own copy,
- // otherwise a subsequent write update to src will incorrectly affect the copy
- pDstSet->ppDescriptors[j+dstStartIndex] = pSrcSet->ppDescriptors[j+srcStartIndex];
+ // TODO : This may be a hole. I believe copy should be
+ // its own copy,
+ // otherwise a subsequent write update to src will
+ // incorrectly affect the copy
+ pDstSet->ppDescriptors[j + dstStartIndex] =
+ pSrcSet->ppDescriptors[j + srcStartIndex];
}
}
}
@@ -2401,27 +3111,49 @@
return skipCall;
}
-// Verify that given pool has descriptors that are being requested for allocation
-static VkBool32 validate_descriptor_availability_in_pool(layer_data* dev_data, DESCRIPTOR_POOL_NODE* pPoolNode, uint32_t count, const VkDescriptorSetLayout* pSetLayouts)
-{
+// Verify that given pool has descriptors that are being requested for
+// allocation
+static VkBool32 validate_descriptor_availability_in_pool(
+ layer_data *dev_data, DESCRIPTOR_POOL_NODE *pPoolNode, uint32_t count,
+ const VkDescriptorSetLayout *pSetLayouts) {
VkBool32 skipCall = VK_FALSE;
uint32_t i = 0, j = 0;
- for (i=0; i<count; ++i) {
- LAYOUT_NODE* pLayout = getLayoutNode(dev_data, pSetLayouts[i]);
+ for (i = 0; i < count; ++i) {
+ LAYOUT_NODE *pLayout = getLayoutNode(dev_data, pSetLayouts[i]);
if (NULL == pLayout) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, (uint64_t) pSetLayouts[i], __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS",
- "Unable to find set layout node for layout %#" PRIxLEAST64 " specified in vkAllocateDescriptorSets() call", (uint64_t) pSetLayouts[i]);
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+ (uint64_t)pSetLayouts[i], __LINE__, DRAWSTATE_INVALID_LAYOUT,
+ "DS", "Unable to find set layout node for layout %#" PRIxLEAST64
+ " specified in vkAllocateDescriptorSets() call",
+ (uint64_t)pSetLayouts[i]);
} else {
uint32_t typeIndex = 0, poolSizeCount = 0;
- for (j=0; j<pLayout->createInfo.bindingCount; ++j) {
- typeIndex = static_cast<uint32_t>(pLayout->createInfo.pBindings[j].descriptorType);
- poolSizeCount = pLayout->createInfo.pBindings[j].descriptorCount;
- if (poolSizeCount > pPoolNode->availableDescriptorTypeCount[typeIndex]) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, (uint64_t) pLayout->layout, __LINE__, DRAWSTATE_DESCRIPTOR_POOL_EMPTY, "DS",
- "Unable to allocate %u descriptors of type %s from pool %#" PRIxLEAST64 ". This pool only has %u descriptors of this type remaining.",
- poolSizeCount, string_VkDescriptorType(pLayout->createInfo.pBindings[j].descriptorType), (uint64_t) pPoolNode->pool, pPoolNode->availableDescriptorTypeCount[typeIndex]);
+ for (j = 0; j < pLayout->createInfo.bindingCount; ++j) {
+ typeIndex = static_cast<uint32_t>(
+ pLayout->createInfo.pBindings[j].descriptorType);
+ poolSizeCount =
+ pLayout->createInfo.pBindings[j].descriptorCount;
+ if (poolSizeCount >
+ pPoolNode->availableDescriptorTypeCount[typeIndex]) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+ (uint64_t)pLayout->layout, __LINE__,
+ DRAWSTATE_DESCRIPTOR_POOL_EMPTY, "DS",
+ "Unable to allocate %u descriptors of type %s from "
+ "pool %#" PRIxLEAST64 ". This pool only has %u "
+ "descriptors of this type "
+ "remaining.",
+ poolSizeCount,
+ string_VkDescriptorType(
+ pLayout->createInfo.pBindings[j].descriptorType),
+ (uint64_t)pPoolNode->pool,
+ pPoolNode->availableDescriptorTypeCount[typeIndex]);
} else { // Decrement available descriptors of this type
- pPoolNode->availableDescriptorTypeCount[typeIndex] -= poolSizeCount;
+ pPoolNode->availableDescriptorTypeCount[typeIndex] -=
+ poolSizeCount;
}
}
}
@@ -2431,56 +3163,49 @@
// Free the shadowed update node for this Set
// NOTE : Calls to this function should be wrapped in mutex
-static void freeShadowUpdateTree(SET_NODE* pSet)
-{
- GENERIC_HEADER* pShadowUpdate = pSet->pUpdateStructs;
+static void freeShadowUpdateTree(SET_NODE *pSet) {
+ GENERIC_HEADER *pShadowUpdate = pSet->pUpdateStructs;
pSet->pUpdateStructs = NULL;
- GENERIC_HEADER* pFreeUpdate = pShadowUpdate;
+ GENERIC_HEADER *pFreeUpdate = pShadowUpdate;
// Clear the descriptor mappings as they will now be invalid
- memset(pSet->ppDescriptors, 0, pSet->descriptorCount*sizeof(GENERIC_HEADER*));
- while(pShadowUpdate) {
+ memset(pSet->ppDescriptors, 0,
+ pSet->descriptorCount * sizeof(GENERIC_HEADER *));
+ while (pShadowUpdate) {
pFreeUpdate = pShadowUpdate;
- pShadowUpdate = (GENERIC_HEADER*)pShadowUpdate->pNext;
+ pShadowUpdate = (GENERIC_HEADER *)pShadowUpdate->pNext;
uint32_t index = 0;
- VkWriteDescriptorSet * pWDS = NULL;
- VkCopyDescriptorSet * pCDS = NULL;
- void** ppToFree = NULL;
- switch (pFreeUpdate->sType)
- {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- pWDS = (VkWriteDescriptorSet*)pFreeUpdate;
- switch (pWDS->descriptorType) {
- case VK_DESCRIPTOR_TYPE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- {
- delete[] pWDS->pImageInfo;
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- {
- delete[] pWDS->pTexelBufferView;
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- {
- delete[] pWDS->pBufferInfo;
- }
- break;
- default:
- break;
- }
- break;
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- break;
+ VkWriteDescriptorSet *pWDS = NULL;
+ VkCopyDescriptorSet *pCDS = NULL;
+ void **ppToFree = NULL;
+ switch (pFreeUpdate->sType) {
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ pWDS = (VkWriteDescriptorSet *)pFreeUpdate;
+ switch (pWDS->descriptorType) {
+ case VK_DESCRIPTOR_TYPE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+ delete[] pWDS -> pImageInfo;
+ } break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+ delete[] pWDS -> pTexelBufferView;
+ } break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ delete[] pWDS -> pBufferInfo;
+ } break;
default:
- assert(0);
break;
+ }
+ break;
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ break;
+ default:
+ assert(0);
+ break;
}
delete pFreeUpdate;
}
@@ -2488,13 +3213,13 @@
// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
-static void deletePools(layer_data* my_data)
-{
+static void deletePools(layer_data *my_data) {
if (my_data->descriptorPoolMap.size() <= 0)
return;
- for (auto ii=my_data->descriptorPoolMap.begin(); ii!=my_data->descriptorPoolMap.end(); ++ii) {
- SET_NODE* pSet = (*ii).second->pSets;
- SET_NODE* pFreeSet = pSet;
+ for (auto ii = my_data->descriptorPoolMap.begin();
+ ii != my_data->descriptorPoolMap.end(); ++ii) {
+ SET_NODE *pSet = (*ii).second->pSets;
+ SET_NODE *pFreeSet = pSet;
while (pSet) {
pFreeSet = pSet;
pSet = pSet->pNext;
@@ -2502,7 +3227,7 @@
// Free Update shadow struct tree
freeShadowUpdateTree(pFreeSet);
if (pFreeSet->ppDescriptors) {
- delete[] pFreeSet->ppDescriptors;
+ delete[] pFreeSet -> ppDescriptors;
}
delete pFreeSet;
}
@@ -2511,20 +3236,22 @@
my_data->descriptorPoolMap.clear();
}
-// WARN : Once deleteLayouts() called, any layout ptrs in Pool/Set data structure will be invalid
+// WARN : Once deleteLayouts() called, any layout ptrs in Pool/Set data
+// structure will be invalid
// NOTE : Calls to this function should be wrapped in mutex
-static void deleteLayouts(layer_data* my_data)
-{
+static void deleteLayouts(layer_data *my_data) {
if (my_data->descriptorSetLayoutMap.size() <= 0)
return;
- for (auto ii=my_data->descriptorSetLayoutMap.begin(); ii!=my_data->descriptorSetLayoutMap.end(); ++ii) {
- LAYOUT_NODE* pLayout = (*ii).second;
+ for (auto ii = my_data->descriptorSetLayoutMap.begin();
+ ii != my_data->descriptorSetLayoutMap.end(); ++ii) {
+ LAYOUT_NODE *pLayout = (*ii).second;
if (pLayout->createInfo.pBindings) {
- for (uint32_t i=0; i<pLayout->createInfo.bindingCount; i++) {
+ for (uint32_t i = 0; i < pLayout->createInfo.bindingCount; i++) {
if (pLayout->createInfo.pBindings[i].pImmutableSamplers)
- delete[] pLayout->createInfo.pBindings[i].pImmutableSamplers;
+ delete[] pLayout -> createInfo.pBindings[i]
+ .pImmutableSamplers;
}
- delete[] pLayout->createInfo.pBindings;
+ delete[] pLayout -> createInfo.pBindings;
}
delete pLayout;
}
@@ -2533,9 +3260,8 @@
// Currently clearing a set is removing all previous updates to that set
// TODO : Validate if this is correct clearing behavior
-static void clearDescriptorSet(layer_data* my_data, VkDescriptorSet set)
-{
- SET_NODE* pSet = getSetNode(my_data, set);
+static void clearDescriptorSet(layer_data *my_data, VkDescriptorSet set) {
+ SET_NODE *pSet = getSetNode(my_data, set);
if (!pSet) {
// TODO : Return error
} else {
@@ -2545,36 +3271,47 @@
}
}
-static void clearDescriptorPool(layer_data* my_data, const VkDevice device, const VkDescriptorPool pool, VkDescriptorPoolResetFlags flags)
-{
- DESCRIPTOR_POOL_NODE* pPool = getPoolNode(my_data, pool);
+static void clearDescriptorPool(layer_data *my_data, const VkDevice device,
+ const VkDescriptorPool pool,
+ VkDescriptorPoolResetFlags flags) {
+ DESCRIPTOR_POOL_NODE *pPool = getPoolNode(my_data, pool);
if (!pPool) {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, (uint64_t) pool, __LINE__, DRAWSTATE_INVALID_POOL, "DS",
- "Unable to find pool node for pool %#" PRIxLEAST64 " specified in vkResetDescriptorPool() call", (uint64_t) pool);
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, (uint64_t)pool,
+ __LINE__, DRAWSTATE_INVALID_POOL, "DS",
+ "Unable to find pool node for pool %#" PRIxLEAST64
+ " specified in vkResetDescriptorPool() call",
+ (uint64_t)pool);
} else {
// TODO: validate flags
// For every set off of this pool, clear it
- SET_NODE* pSet = pPool->pSets;
+ SET_NODE *pSet = pPool->pSets;
while (pSet) {
clearDescriptorSet(my_data, pSet->set);
pSet = pSet->pNext;
}
// Reset available count to max count for this pool
- for (uint32_t i=0; i<pPool->availableDescriptorTypeCount.size(); ++i) {
- pPool->availableDescriptorTypeCount[i] = pPool->maxDescriptorTypeCount[i];
+ for (uint32_t i = 0; i < pPool->availableDescriptorTypeCount.size();
+ ++i) {
+ pPool->availableDescriptorTypeCount[i] =
+ pPool->maxDescriptorTypeCount[i];
}
}
}
// For given CB object, fetch associated CB Node from map
-static GLOBAL_CB_NODE* getCBNode(layer_data* my_data, const VkCommandBuffer cb)
-{
+static GLOBAL_CB_NODE *getCBNode(layer_data *my_data,
+ const VkCommandBuffer cb) {
loader_platform_thread_lock_mutex(&globalLock);
if (my_data->commandBufferMap.count(cb) == 0) {
loader_platform_thread_unlock_mutex(&globalLock);
// TODO : How to pass cb as srcObj here?
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
- "Attempt to use CommandBuffer %#" PRIxLEAST64 " that doesn't exist!", (uint64_t)(cb));
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
+ "Attempt to use CommandBuffer %#" PRIxLEAST64
+ " that doesn't exist!",
+ (uint64_t)(cb));
return NULL;
}
loader_platform_thread_unlock_mutex(&globalLock);
@@ -2583,44 +3320,63 @@
// Free all CB Nodes
// NOTE : Calls to this function should be wrapped in mutex
-static void deleteCommandBuffers(layer_data* my_data)
-{
+static void deleteCommandBuffers(layer_data *my_data) {
if (my_data->commandBufferMap.size() <= 0) {
return;
}
- for (auto ii=my_data->commandBufferMap.begin(); ii!=my_data->commandBufferMap.end(); ++ii) {
+ for (auto ii = my_data->commandBufferMap.begin();
+ ii != my_data->commandBufferMap.end(); ++ii) {
delete (*ii).second;
}
my_data->commandBufferMap.clear();
}
-static VkBool32 report_error_no_cb_begin(const layer_data* dev_data, const VkCommandBuffer cb, const char* caller_name)
-{
- return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)cb, __LINE__, DRAWSTATE_NO_BEGIN_COMMAND_BUFFER, "DS",
- "You must call vkBeginCommandBuffer() before this call to %s", caller_name);
+static VkBool32 report_error_no_cb_begin(const layer_data *dev_data,
+ const VkCommandBuffer cb,
+ const char *caller_name) {
+ return log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)cb, __LINE__,
+ DRAWSTATE_NO_BEGIN_COMMAND_BUFFER, "DS",
+ "You must call vkBeginCommandBuffer() before this call to %s",
+ caller_name);
}
-VkBool32 validateCmdsInCmdBuffer(const layer_data* dev_data, const GLOBAL_CB_NODE* pCB, const CMD_TYPE cmd_type) {
- if (!pCB->activeRenderPass) return VK_FALSE;
+VkBool32 validateCmdsInCmdBuffer(const layer_data *dev_data,
+ const GLOBAL_CB_NODE *pCB,
+ const CMD_TYPE cmd_type) {
+ if (!pCB->activeRenderPass)
+ return VK_FALSE;
VkBool32 skip_call = VK_FALSE;
- if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS && cmd_type != CMD_EXECUTECOMMANDS) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_COMMAND_BUFFER, "DS", "Commands cannot be called in a subpass using secondary command buffers.");
- } else if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_INLINE && cmd_type == CMD_EXECUTECOMMANDS) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_COMMAND_BUFFER, "DS", "vkCmdExecuteCommands() cannot be called in a subpass using inline commands.");
+ if (pCB->activeSubpassContents ==
+ VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS &&
+ cmd_type != CMD_EXECUTECOMMANDS) {
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
+ "Commands cannot be called in a subpass using secondary "
+ "command buffers.");
+ } else if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_INLINE &&
+ cmd_type == CMD_EXECUTECOMMANDS) {
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
+ "vkCmdExecuteCommands() cannot be called in a subpass "
+ "using inline commands.");
}
return skip_call;
}
// Add specified CMD to the CmdBuffer in given pCB, flagging errors if CB is not
// in the recording state or if there's an issue with the Cmd ordering
-static VkBool32 addCmd(const layer_data* my_data, GLOBAL_CB_NODE* pCB, const CMD_TYPE cmd, const char* caller_name)
-{
+static VkBool32 addCmd(const layer_data *my_data, GLOBAL_CB_NODE *pCB,
+ const CMD_TYPE cmd, const char *caller_name) {
VkBool32 skipCall = VK_FALSE;
if (pCB->state != CB_RECORDING) {
- skipCall |= report_error_no_cb_begin(my_data, pCB->commandBuffer, caller_name);
+ skipCall |=
+ report_error_no_cb_begin(my_data, pCB->commandBuffer, caller_name);
skipCall |= validateCmdsInCmdBuffer(my_data, pCB, cmd);
CMD_NODE cmdNode = {};
// init cmd node and append to end of cmd LL
@@ -2632,15 +3388,15 @@
}
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
-static void resetCB(layer_data* my_data, const VkCommandBuffer cb)
-{
- GLOBAL_CB_NODE* pCB = my_data->commandBufferMap[cb];
+static void resetCB(layer_data *my_data, const VkCommandBuffer cb) {
+ GLOBAL_CB_NODE *pCB = my_data->commandBufferMap[cb];
if (pCB) {
pCB->cmds.clear();
// Reset CB state (note that createInfo is not cleared)
pCB->commandBuffer = cb;
memset(&pCB->beginInfo, 0, sizeof(VkCommandBufferBeginInfo));
- memset(&pCB->inheritanceInfo, 0, sizeof(VkCommandBufferInheritanceInfo));
+ memset(&pCB->inheritanceInfo, 0,
+ sizeof(VkCommandBufferInheritanceInfo));
pCB->fence = 0;
pCB->numCmds = 0;
memset(pCB->drawCount, 0, NUM_DRAW_TYPES * sizeof(uint64_t));
@@ -2684,8 +3440,7 @@
}
// Set PSO-related status bits for CB, including dynamic state set via PSO
-static void set_cb_pso_status(GLOBAL_CB_NODE* pCB, const PIPELINE_NODE* pPipe)
-{
+static void set_cb_pso_status(GLOBAL_CB_NODE *pCB, const PIPELINE_NODE *pPipe) {
for (uint32_t i = 0; i < pPipe->cbStateCI.attachmentCount; i++) {
if (0 != pPipe->pAttachments[i].colorWriteMask) {
pCB->status |= CBSTATUS_COLOR_BLEND_WRITE_ENABLE;
@@ -2705,38 +3460,38 @@
// Then unset any state that's noted as dynamic in PSO
// Finally OR that into CB statemask
CBStatusFlags psoDynStateMask = CBSTATUS_ALL;
- for (uint32_t i=0; i < pPipe->dynStateCI.dynamicStateCount; i++) {
+ for (uint32_t i = 0; i < pPipe->dynStateCI.dynamicStateCount; i++) {
switch (pPipe->dynStateCI.pDynamicStates[i]) {
- case VK_DYNAMIC_STATE_VIEWPORT:
- psoDynStateMask &= ~CBSTATUS_VIEWPORT_SET;
- break;
- case VK_DYNAMIC_STATE_SCISSOR:
- psoDynStateMask &= ~CBSTATUS_SCISSOR_SET;
- break;
- case VK_DYNAMIC_STATE_LINE_WIDTH:
- psoDynStateMask &= ~CBSTATUS_LINE_WIDTH_SET;
- break;
- case VK_DYNAMIC_STATE_DEPTH_BIAS:
- psoDynStateMask &= ~CBSTATUS_DEPTH_BIAS_SET;
- break;
- case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
- psoDynStateMask &= ~CBSTATUS_BLEND_SET;
- break;
- case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
- psoDynStateMask &= ~CBSTATUS_DEPTH_BOUNDS_SET;
- break;
- case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
- psoDynStateMask &= ~CBSTATUS_STENCIL_READ_MASK_SET;
- break;
- case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
- psoDynStateMask &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
- break;
- case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
- psoDynStateMask &= ~CBSTATUS_STENCIL_REFERENCE_SET;
- break;
- default:
- // TODO : Flag error here
- break;
+ case VK_DYNAMIC_STATE_VIEWPORT:
+ psoDynStateMask &= ~CBSTATUS_VIEWPORT_SET;
+ break;
+ case VK_DYNAMIC_STATE_SCISSOR:
+ psoDynStateMask &= ~CBSTATUS_SCISSOR_SET;
+ break;
+ case VK_DYNAMIC_STATE_LINE_WIDTH:
+ psoDynStateMask &= ~CBSTATUS_LINE_WIDTH_SET;
+ break;
+ case VK_DYNAMIC_STATE_DEPTH_BIAS:
+ psoDynStateMask &= ~CBSTATUS_DEPTH_BIAS_SET;
+ break;
+ case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
+ psoDynStateMask &= ~CBSTATUS_BLEND_SET;
+ break;
+ case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
+ psoDynStateMask &= ~CBSTATUS_DEPTH_BOUNDS_SET;
+ break;
+ case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
+ psoDynStateMask &= ~CBSTATUS_STENCIL_READ_MASK_SET;
+ break;
+ case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
+ psoDynStateMask &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
+ break;
+ case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
+ psoDynStateMask &= ~CBSTATUS_STENCIL_REFERENCE_SET;
+ break;
+ default:
+ // TODO : Flag error here
+ break;
}
}
pCB->status |= psoDynStateMask;
@@ -2744,91 +3499,125 @@
}
// Print the last bound Gfx Pipeline
-static VkBool32 printPipeline(layer_data* my_data, const VkCommandBuffer cb)
-{
+static VkBool32 printPipeline(layer_data *my_data, const VkCommandBuffer cb) {
VkBool32 skipCall = VK_FALSE;
- GLOBAL_CB_NODE* pCB = getCBNode(my_data, cb);
+ GLOBAL_CB_NODE *pCB = getCBNode(my_data, cb);
if (pCB) {
PIPELINE_NODE *pPipeTrav = getPipeline(my_data, pCB->lastBoundPipeline);
if (!pPipeTrav) {
// nothing to print
} else {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "%s", vk_print_vkgraphicspipelinecreateinfo(&pPipeTrav->graphicsPipelineCI, "{DS}").c_str());
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_NONE, "DS", "%s",
+ vk_print_vkgraphicspipelinecreateinfo(
+ &pPipeTrav->graphicsPipelineCI, "{DS}").c_str());
}
}
return skipCall;
}
// Print details of DS config to stdout
-static VkBool32 printDSConfig(layer_data* my_data, const VkCommandBuffer cb)
-{
+static VkBool32 printDSConfig(layer_data *my_data, const VkCommandBuffer cb) {
VkBool32 skipCall = VK_FALSE;
- char ds_config_str[1024*256] = {0}; // TODO : Currently making this buffer HUGE w/o overrun protection. Need to be smarter, start smaller, and grow as needed.
- GLOBAL_CB_NODE* pCB = getCBNode(my_data, cb);
+ char ds_config_str[1024 * 256] = {0}; // TODO : Currently making this buffer
+ // HUGE w/o overrun protection. Need
+ // to be smarter, start smaller, and
+ // grow as needed.
+ GLOBAL_CB_NODE *pCB = getCBNode(my_data, cb);
if (pCB && pCB->lastBoundDescriptorSet) {
- SET_NODE* pSet = getSetNode(my_data, pCB->lastBoundDescriptorSet);
- DESCRIPTOR_POOL_NODE* pPool = getPoolNode(my_data, pSet->pool);
+ SET_NODE *pSet = getSetNode(my_data, pCB->lastBoundDescriptorSet);
+ DESCRIPTOR_POOL_NODE *pPool = getPoolNode(my_data, pSet->pool);
// Print out pool details
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "Details for pool %#" PRIxLEAST64 ".", (uint64_t) pPool->pool);
- string poolStr = vk_print_vkdescriptorpoolcreateinfo(&pPool->createInfo, " ");
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "%s", poolStr.c_str());
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE, "DS",
+ "Details for pool %#" PRIxLEAST64 ".", (uint64_t)pPool->pool);
+ string poolStr =
+ vk_print_vkdescriptorpoolcreateinfo(&pPool->createInfo, " ");
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_NONE, "DS", "%s", poolStr.c_str());
// Print out set details
char prefix[10];
uint32_t index = 0;
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "Details for descriptor set %#" PRIxLEAST64 ".", (uint64_t) pSet->set);
- LAYOUT_NODE* pLayout = pSet->pLayout;
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE,
+ "DS", "Details for descriptor set %#" PRIxLEAST64 ".",
+ (uint64_t)pSet->set);
+ LAYOUT_NODE *pLayout = pSet->pLayout;
// Print layout details
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "Layout #%u, (object %#" PRIxLEAST64 ") for DS %#" PRIxLEAST64 ".", index+1, (uint64_t)(pLayout->layout), (uint64_t)(pSet->set));
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE, "DS",
+ "Layout #%u, (object %#" PRIxLEAST64 ") for DS %#" PRIxLEAST64 ".",
+ index + 1, (uint64_t)(pLayout->layout), (uint64_t)(pSet->set));
sprintf(prefix, " [L%u] ", index);
- string DSLstr = vk_print_vkdescriptorsetlayoutcreateinfo(&pLayout->createInfo, prefix).c_str();
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "%s", DSLstr.c_str());
+ string DSLstr = vk_print_vkdescriptorsetlayoutcreateinfo(
+ &pLayout->createInfo, prefix).c_str();
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_NONE, "DS", "%s", DSLstr.c_str());
index++;
- GENERIC_HEADER* pUpdate = pSet->pUpdateStructs;
+ GENERIC_HEADER *pUpdate = pSet->pUpdateStructs;
if (pUpdate) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "Update Chain [UC] for descriptor set %#" PRIxLEAST64 ":", (uint64_t) pSet->set);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE,
+ "DS", "Update Chain [UC] for descriptor set %#" PRIxLEAST64 ":",
+ (uint64_t)pSet->set);
sprintf(prefix, " [UC] ");
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "%s", dynamic_display(pUpdate, prefix).c_str());
- // TODO : If there is a "view" associated with this update, print CI for that view
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE,
+ "DS", "%s", dynamic_display(pUpdate, prefix).c_str());
+ // TODO : If there is a "view" associated with this update, print CI
+ // for that view
} else {
if (0 != pSet->descriptorCount) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "No Update Chain for descriptor set %#" PRIxLEAST64 " which has %u descriptors (vkUpdateDescriptors has not been called)", (uint64_t) pSet->set, pSet->descriptorCount);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE,
+ "DS", "No Update Chain for descriptor set %#" PRIxLEAST64
+ " which has %u descriptors (vkUpdateDescriptors has "
+ "not been called)",
+ (uint64_t)pSet->set, pSet->descriptorCount);
} else {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "FYI: No descriptors in descriptor set %#" PRIxLEAST64 ".", (uint64_t) pSet->set);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE,
+ "DS",
+ "FYI: No descriptors in descriptor set %#" PRIxLEAST64 ".",
+ (uint64_t)pSet->set);
}
}
}
return skipCall;
}
-static void printCB(layer_data* my_data, const VkCommandBuffer cb)
-{
- GLOBAL_CB_NODE* pCB = getCBNode(my_data, cb);
+static void printCB(layer_data *my_data, const VkCommandBuffer cb) {
+ GLOBAL_CB_NODE *pCB = getCBNode(my_data, cb);
if (pCB && pCB->cmds.size() > 0) {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "Cmds in CB %p", (void*)cb);
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_NONE,
+ "DS", "Cmds in CB %p", (void *)cb);
vector<CMD_NODE> cmds = pCB->cmds;
- for (auto ii=cmds.begin(); ii!=cmds.end(); ++ii) {
+ for (auto ii = cmds.begin(); ii != cmds.end(); ++ii) {
// TODO : Need to pass cb as srcObj here
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_NONE, "DS",
- " CMD#%" PRIu64 ": %s", (*ii).cmdNumber, cmdTypeToString((*ii).type).c_str());
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_NONE, "DS", " CMD#%" PRIu64 ": %s",
+ (*ii).cmdNumber, cmdTypeToString((*ii).type).c_str());
}
} else {
// Nothing to print
}
}
-static VkBool32 synchAndPrintDSConfig(layer_data* my_data, const VkCommandBuffer cb)
-{
+static VkBool32 synchAndPrintDSConfig(layer_data *my_data,
+ const VkCommandBuffer cb) {
VkBool32 skipCall = VK_FALSE;
if (!(my_data->report_data->active_flags & VK_DEBUG_REPORT_INFO_BIT_EXT)) {
return skipCall;
@@ -2838,39 +3627,49 @@
return skipCall;
}
-// Flags validation error if the associated call is made inside a render pass. The apiName
+// Flags validation error if the associated call is made inside a render pass.
+// The apiName
// routine should ONLY be called outside a render pass.
-static VkBool32 insideRenderPass(const layer_data* my_data, GLOBAL_CB_NODE *pCB, const char *apiName)
-{
+static VkBool32 insideRenderPass(const layer_data *my_data, GLOBAL_CB_NODE *pCB,
+ const char *apiName) {
VkBool32 inside = VK_FALSE;
if (pCB->activeRenderPass) {
- inside = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)pCB->commandBuffer, __LINE__, DRAWSTATE_INVALID_RENDERPASS_CMD, "DS",
- "%s: It is invalid to issue this call inside an active render pass (%#" PRIxLEAST64 ")",
- apiName, (uint64_t) pCB->activeRenderPass);
+ inside = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)pCB->commandBuffer, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS_CMD, "DS",
+ "%s: It is invalid to issue this call inside an "
+ "active render pass (%#" PRIxLEAST64 ")",
+ apiName, (uint64_t)pCB->activeRenderPass);
}
return inside;
}
-// Flags validation error if the associated call is made outside a render pass. The apiName
+// Flags validation error if the associated call is made outside a render pass.
+// The apiName
// routine should ONLY be called inside a render pass.
-static VkBool32 outsideRenderPass(const layer_data* my_data, GLOBAL_CB_NODE *pCB, const char *apiName)
-{
+static VkBool32 outsideRenderPass(const layer_data *my_data,
+ GLOBAL_CB_NODE *pCB, const char *apiName) {
VkBool32 outside = VK_FALSE;
- if (((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
- (!pCB->activeRenderPass)) ||
+ if (((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) &&
+ (!pCB->activeRenderPass)) ||
((pCB->createInfo.level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) &&
- (!pCB->activeRenderPass) &&
- !(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT))) {
- outside = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)pCB->commandBuffer, __LINE__, DRAWSTATE_NO_ACTIVE_RENDERPASS, "DS",
- "%s: This call must be issued inside an active render pass.", apiName);
+ (!pCB->activeRenderPass) &&
+ !(pCB->beginInfo.flags &
+ VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT))) {
+ outside = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)pCB->commandBuffer, __LINE__,
+ DRAWSTATE_NO_ACTIVE_RENDERPASS, "DS",
+ "%s: This call must be issued inside an active render pass.",
+ apiName);
}
return outside;
}
-static void init_draw_state(layer_data *my_data, const VkAllocationCallbacks *pAllocator)
-{
+static void init_draw_state(layer_data *my_data,
+ const VkAllocationCallbacks *pAllocator) {
uint32_t report_flags = 0;
uint32_t debug_action = 0;
FILE *log_output = NULL;
@@ -2878,10 +3677,9 @@
VkDebugReportCallbackEXT callback;
// initialize DrawState options
report_flags = getLayerOptionFlags("DrawStateReportFlags", 0);
- getLayerOptionEnum("DrawStateDebugAction", (uint32_t *) &debug_action);
+ getLayerOptionEnum("DrawStateDebugAction", (uint32_t *)&debug_action);
- if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
- {
+ if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
option_str = getLayerOption("DrawStateLogFilename");
log_output = getLayerLogOutput(option_str, "DrawState");
VkDebugReportCallbackCreateInfoEXT dbgInfo;
@@ -2890,7 +3688,8 @@
dbgInfo.pfnCallback = log_callback;
dbgInfo.pUserData = log_output;
dbgInfo.flags = report_flags;
- layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator, &callback);
+ layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator,
+ &callback);
my_data->logging_callback.push_back(callback);
}
@@ -2901,24 +3700,29 @@
dbgInfo.pfnCallback = win32_debug_output_msg;
dbgInfo.pUserData = log_output;
dbgInfo.flags = report_flags;
- layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator, &callback);
+ layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator,
+ &callback);
my_data->logging_callback.push_back(callback);
}
- if (!globalLockInitialized)
- {
+ if (!globalLockInitialized) {
loader_platform_thread_create_mutex(&globalLock);
globalLockInitialized = 1;
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkInstance *pInstance) {
+ VkLayerInstanceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkCreateInstance fpCreateInstance =
+ (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if (fpCreateInstance == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -2930,15 +3734,16 @@
if (result != VK_SUCCESS)
return result;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
my_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
- layer_init_instance_dispatch_table(*pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
+ layer_init_instance_dispatch_table(
+ *pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
my_data->report_data = debug_report_create_instance(
- my_data->instance_dispatch_table,
- *pInstance,
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ my_data->instance_dispatch_table, *pInstance,
+ pCreateInfo->enabledExtensionCount,
+ pCreateInfo->ppEnabledExtensionNames);
init_draw_state(my_data, pAllocator);
@@ -2946,8 +3751,9 @@
}
/* hook DestroyInstance to remove tableInstanceMap entry */
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyInstance(VkInstance instance,
+ const VkAllocationCallbacks *pAllocator) {
// TODOSC : Shouldn't need any customization here
dispatch_key key = get_dispatch_key(instance);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
@@ -2964,7 +3770,8 @@
layer_debug_report_destroy_instance(my_data->report_data);
delete my_data->instance_dispatch_table;
layer_data_map.erase(key);
- // TODO : Potential race here with separate threads creating/destroying instance
+ // TODO : Potential race here with separate threads creating/destroying
+ // instance
if (layer_data_map.empty()) {
// Release mutex when destroying last instance.
loader_platform_thread_delete_mutex(&globalLock);
@@ -2972,44 +3779,57 @@
}
}
-static void createDeviceRegisterExtensions(const VkDeviceCreateInfo* pCreateInfo, VkDevice device)
-{
+static void
+createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo,
+ VkDevice device) {
uint32_t i;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
dev_data->device_extensions.debug_marker_enabled = false;
dev_data->device_extensions.wsi_enabled = false;
+ VkLayerDispatchTable *pDisp = dev_data->device_dispatch_table;
+ PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
- VkLayerDispatchTable *pDisp = dev_data->device_dispatch_table;
- PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
-
- pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
- pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
- pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
- pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
- pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
+ pDisp->CreateSwapchainKHR =
+ (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
+ pDisp->DestroySwapchainKHR =
+ (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
+ pDisp->GetSwapchainImagesKHR =
+ (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
+ pDisp->AcquireNextImageKHR =
+ (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
+ pDisp->QueuePresentKHR =
+ (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
dev_data->device_extensions.wsi_enabled = true;
}
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], DEBUG_MARKER_EXTENSION_NAME) == 0) {
- /* Found a matching extension name, mark it enabled and init dispatch table*/
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ DEBUG_MARKER_EXTENSION_NAME) == 0) {
+ /* Found a matching extension name, mark it enabled and init
+ * dispatch table*/
dev_data->device_extensions.debug_marker_enabled = true;
initDebugMarkerTable(device);
-
}
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice)
-{
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if (fpCreateDevice == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -3022,27 +3842,32 @@
return result;
}
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
+ layer_data *my_instance_data =
+ get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
// Setup device dispatch table
my_device_data->device_dispatch_table = new VkLayerDispatchTable;
- layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
+ layer_init_device_dispatch_table(
+ *pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
- my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
+ my_device_data->report_data = layer_debug_report_create_device(
+ my_instance_data->report_data, *pDevice);
createDeviceRegisterExtensions(pCreateInfo, *pDevice);
// Get physical device limits for this device
- my_instance_data->instance_dispatch_table->GetPhysicalDeviceProperties(gpu, &(my_instance_data->physDevPropertyMap[*pDevice]));
+ my_instance_data->instance_dispatch_table->GetPhysicalDeviceProperties(
+ gpu, &(my_instance_data->physDevPropertyMap[*pDevice]));
return result;
}
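Note for reviewers: the vkCreateDevice path reformatted above builds per-device state keyed by the handle's dispatch key, the same lookup every entry point in this file performs via get_my_data_ptr/get_dispatch_key before forwarding to the next layer. Below is a minimal standalone sketch of that pattern, assuming the loader-installed dispatch pointer sits at the start of each dispatchable handle; the sketch_* names and the placeholder struct are illustrative, not the tree's code.

#include <unordered_map>

// Placeholder for the real layer_data; fields elided.
struct layer_data_sketch {
    int example_state = 0; // hypothetical field
};

static std::unordered_map<void *, layer_data_sketch *> sketch_data_map;

// Dispatchable Vulkan handles begin with a loader-owned dispatch pointer,
// so that pointer doubles as a stable per-object key.
static inline void *sketch_dispatch_key(void *dispatchable_handle) {
    return *(void **)dispatchable_handle;
}

static layer_data_sketch *sketch_get_data(void *dispatchable_handle) {
    void *key = sketch_dispatch_key(dispatchable_handle);
    auto it = sketch_data_map.find(key);
    if (it == sketch_data_map.end())
        it = sketch_data_map.emplace(key, new layer_data_sketch{}).first;
    return it->second;
}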
// prototype
-static void deleteRenderPasses(layer_data*);
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator)
-{
+static void deleteRenderPasses(layer_data *);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
// TODOSC : Shouldn't need any customization here
dispatch_key key = get_dispatch_key(device);
- layer_data* dev_data = get_my_data_ptr(key, layer_data_map);
+ layer_data *dev_data = get_my_data_ptr(key, layer_data_map);
// Free all the memory
loader_platform_thread_lock_mutex(&globalLock);
deletePipelines(dev_data);
@@ -3063,114 +3888,113 @@
}
static const VkExtensionProperties instance_extensions[] = {
- {
- VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
- VK_EXT_DEBUG_REPORT_SPEC_VERSION
- }
-};
+ {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
- const char *pLayerName,
- uint32_t *pCount,
- VkExtensionProperties* pProperties)
-{
- return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
+ return util_GetExtensionProperties(1, instance_extensions, pCount,
+ pProperties);
}
-static const VkLayerProperties ds_global_layers[] = {
- {
- "VK_LAYER_LUNARG_draw_state",
- VK_API_VERSION,
- VK_MAKE_VERSION(0, 1, 0),
- "Validation layer: draw_state",
- }
-};
+static const VkLayerProperties ds_global_layers[] = {{
+ "VK_LAYER_LUNARG_draw_state", VK_API_VERSION, VK_MAKE_VERSION(0, 1, 0),
+ "Validation layer: draw_state",
+}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
- uint32_t *pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+ VkLayerProperties *pProperties) {
return util_GetLayerProperties(ARRAY_SIZE(ds_global_layers),
- ds_global_layers,
- pCount, pProperties);
+ ds_global_layers, pCount, pProperties);
}
-static const VkExtensionProperties ds_device_extensions[] = {
- {
- DEBUG_MARKER_EXTENSION_NAME,
- VK_MAKE_VERSION(0, 1, 0),
- }
-};
+static const VkExtensionProperties ds_device_extensions[] = {{
+ DEBUG_MARKER_EXTENSION_NAME, VK_MAKE_VERSION(0, 1, 0),
+}};
-static const VkLayerProperties ds_device_layers[] = {
- {
- "VK_LAYER_LUNARG_draw_state",
- VK_API_VERSION,
- VK_MAKE_VERSION(0, 1, 0),
- "Validation layer: draw_state",
- }
-};
+static const VkLayerProperties ds_device_layers[] = {{
+ "VK_LAYER_LUNARG_draw_state", VK_API_VERSION, VK_MAKE_VERSION(0, 1, 0),
+ "Validation layer: draw_state",
+}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pCount,
- VkExtensionProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+ const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
// DrawState does not have any physical device extensions
if (pLayerName == NULL) {
dispatch_key key = get_dispatch_key(physicalDevice);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
- return my_data->instance_dispatch_table->EnumerateDeviceExtensionProperties(
- physicalDevice,
- NULL,
- pCount,
- pProperties);
+ return my_data->instance_dispatch_table
+ ->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount,
+ pProperties);
} else {
return util_GetExtensionProperties(ARRAY_SIZE(ds_device_extensions),
- ds_device_extensions,
- pCount, pProperties);
+ ds_device_extensions, pCount,
+ pProperties);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
+ uint32_t *pCount,
+ VkLayerProperties *pProperties) {
/* DrawState physical device layers are the same as global */
- return util_GetLayerProperties(ARRAY_SIZE(ds_device_layers), ds_device_layers,
- pCount, pProperties);
+ return util_GetLayerProperties(ARRAY_SIZE(ds_device_layers),
+ ds_device_layers, pCount, pProperties);
}
VkBool32 ValidateCmdBufImageLayouts(VkCommandBuffer cmdBuffer) {
VkBool32 skip_call = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
for (auto cb_image_data : pCB->imageLayoutMap) {
auto image_data = dev_data->imageLayoutMap.find(cb_image_data.first);
if (image_data == dev_data->imageLayoutMap.end()) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Cannot submit cmd buffer using deleted image %" PRIu64 ".", (uint64_t)(cb_image_data.first));
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot submit cmd buffer using deleted image %" PRIu64 ".",
+ (uint64_t)(cb_image_data.first));
} else {
- if (dev_data->imageLayoutMap[cb_image_data.first]->layout != cb_image_data.second.initialLayout) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Cannot submit cmd buffer using image with layout %d when first use is %d.", dev_data->imageLayoutMap[cb_image_data.first]->layout, cb_image_data.second.initialLayout);
+ if (dev_data->imageLayoutMap[cb_image_data.first]->layout !=
+ cb_image_data.second.initialLayout) {
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot submit cmd buffer using image with layout %d when "
+ "first use is %d.",
+ dev_data->imageLayoutMap[cb_image_data.first]->layout,
+ cb_image_data.second.initialLayout);
}
- dev_data->imageLayoutMap[cb_image_data.first]->layout = cb_image_data.second.layout;
+ dev_data->imageLayoutMap[cb_image_data.first]->layout =
+ cb_image_data.second.layout;
}
}
return skip_call;
}
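ValidateCmdBufImageLayouts above compares the layout each submitted command buffer expected an image to be in against the layer's device-global layout record, then applies the buffer's final layout. A simplified standalone model of that check follows, using a stand-in enum and integer image IDs rather than the real Vulkan types.

#include <cstdio>
#include <unordered_map>

enum class Layout { Undefined, General, ColorAttachment, ShaderReadOnly };

struct CBImageUse {
    Layout initialLayout; // layout the CB expects at submit time
    Layout finalLayout;   // layout the CB leaves the image in
};

using ImageId = unsigned long long; // stand-in for VkImage

static bool sketch_validate_and_apply(
    std::unordered_map<ImageId, Layout> &globalLayout,        // device state
    const std::unordered_map<ImageId, CBImageUse> &cbLayouts) // per-CB record
{
    bool ok = true;
    for (const auto &use : cbLayouts) {
        auto global = globalLayout.find(use.first);
        if (global == globalLayout.end()) {
            std::printf("image %llu was destroyed before submit\n", use.first);
            ok = false;
        } else {
            if (global->second != use.second.initialLayout) {
                std::printf("image %llu is not in the layout its first use "
                            "expects\n", use.first);
                ok = false;
            }
            // The real layer applies the CB's final layout either way.
            global->second = use.second.finalLayout;
        }
    }
    return ok;
}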
-// Track which resources are in-flight by atomically incrementing their "in_use" count
-VkBool32 validateAndIncrementResources(layer_data* my_data, GLOBAL_CB_NODE* pCB) {
+// Track which resources are in-flight by atomically incrementing their "in_use"
+// count
+VkBool32 validateAndIncrementResources(layer_data *my_data,
+ GLOBAL_CB_NODE *pCB) {
VkBool32 skip_call = VK_FALSE;
for (auto drawDataElement : pCB->drawData) {
for (auto buffer : drawDataElement.buffers) {
auto buffer_data = my_data->bufferMap.find(buffer);
if (buffer_data == my_data->bufferMap.end()) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, (uint64_t)(buffer), __LINE__, DRAWSTATE_INVALID_BUFFER, "DS",
- "Cannot submit cmd buffer using deleted buffer %" PRIu64 ".", (uint64_t)(buffer));
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, (uint64_t)(buffer),
+ __LINE__, DRAWSTATE_INVALID_BUFFER, "DS",
+ "Cannot submit cmd buffer using deleted buffer %" PRIu64
+ ".",
+ (uint64_t)(buffer));
} else {
buffer_data->second.in_use.fetch_add(1);
}
@@ -3179,8 +4003,13 @@
for (auto set : pCB->uniqueBoundSets) {
auto setNode = my_data->setMap.find(set);
if (setNode == my_data->setMap.end()) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(set), __LINE__, DRAWSTATE_INVALID_DESCRIPTOR_SET, "DS",
- "Cannot submit cmd buffer using deleted descriptor set %" PRIu64 ".", (uint64_t)(set));
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)(set),
+ __LINE__, DRAWSTATE_INVALID_DESCRIPTOR_SET, "DS",
+ "Cannot submit cmd buffer using deleted descriptor set %" PRIu64
+ ".",
+ (uint64_t)(set));
} else {
setNode->second->in_use.fetch_add(1);
}
@@ -3188,8 +4017,8 @@
return skip_call;
}
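validateAndIncrementResources and the decrementResources overloads reformatted here implement a simple in-flight reference count per resource. The sketch below shows the whole lifecycle on a placeholder TrackedBuffer (acquire at submit, release at retirement, and the destroy-time guard that validateIdleBuffer/vkDestroyBuffer rely on); it illustrates the pattern only and is not the tree's code.

#include <atomic>
#include <cstdio>
#include <unordered_map>

struct TrackedBuffer {
    std::atomic<int> in_use{0}; // pending submissions referencing the buffer
};

using BufferId = unsigned long long; // stand-in for VkBuffer

// Submit path: every buffer referenced by the command buffer is counted.
static bool sketch_acquire(std::unordered_map<BufferId, TrackedBuffer> &buffers,
                           BufferId handle) {
    auto it = buffers.find(handle);
    if (it == buffers.end())
        return false; // the CB references a buffer that was already deleted
    it->second.in_use.fetch_add(1);
    return true;
}

// Retirement path (fence / queue idle / device idle): drop the count again.
static void sketch_release(std::unordered_map<BufferId, TrackedBuffer> &buffers,
                           BufferId handle) {
    auto it = buffers.find(handle);
    if (it != buffers.end())
        it->second.in_use.fetch_add(-1);
}

// Destroy path: only buffers the layer knows about, and that no pending
// submission still references, may be destroyed.
static bool sketch_ok_to_destroy(
    const std::unordered_map<BufferId, TrackedBuffer> &buffers,
    BufferId handle) {
    auto it = buffers.find(handle);
    if (it == buffers.end()) {
        std::printf("buffer %llu was never created (or destroyed twice)\n",
                    handle);
        return false;
    }
    if (it->second.in_use.load() != 0) {
        std::printf("buffer %llu is still in use by a command buffer\n",
                    handle);
        return false;
    }
    return true;
}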
-void decrementResources(layer_data* my_data, VkCommandBuffer cmdBuffer) {
- GLOBAL_CB_NODE* pCB = getCBNode(my_data, cmdBuffer);
+void decrementResources(layer_data *my_data, VkCommandBuffer cmdBuffer) {
+ GLOBAL_CB_NODE *pCB = getCBNode(my_data, cmdBuffer);
for (auto drawDataElement : pCB->drawData) {
for (auto buffer : drawDataElement.buffers) {
auto buffer_data = my_data->bufferMap.find(buffer);
@@ -3209,10 +4038,13 @@
}
}
-void decrementResources(layer_data* my_data, uint32_t fenceCount, const VkFence* pFences) {
+void decrementResources(layer_data *my_data, uint32_t fenceCount,
+ const VkFence *pFences) {
for (uint32_t i = 0; i < fenceCount; ++i) {
auto fence_data = my_data->fenceMap.find(pFences[i]);
- if (fence_data == my_data->fenceMap.end() || !fence_data->second.needsSignaled) return;
+ if (fence_data == my_data->fenceMap.end() ||
+ !fence_data->second.needsSignaled)
+ return;
fence_data->second.needsSignaled = false;
if (fence_data->second.priorFence != VK_NULL_HANDLE) {
decrementResources(my_data, 1, &fence_data->second.priorFence);
@@ -3223,7 +4055,7 @@
}
}
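The fence-count overload of decrementResources above retires a fence at most once, follows its priorFence chain, and only then releases the command buffers the fence owns. A standalone sketch of that walk, with integer stand-ins for the Vulkan handles:

#include <map>
#include <vector>

using FenceId = int;  // stand-in for VkFence; 0 plays the role of VK_NULL_HANDLE
using CmdBufId = int; // stand-in for VkCommandBuffer

struct FenceNode {
    bool needsSignaled = true;        // not yet retired
    FenceId priorFence = 0;           // earlier fence on the same queue, if any
    std::vector<CmdBufId> cmdBuffers; // work retired when this fence signals
};

static void sketch_retire_fence(std::map<FenceId, FenceNode> &fences,
                                std::vector<CmdBufId> &retired, FenceId fence) {
    auto it = fences.find(fence);
    if (it == fences.end() || !it->second.needsSignaled)
        return; // unknown fence, or already retired
    it->second.needsSignaled = false;
    if (it->second.priorFence != 0)
        sketch_retire_fence(fences, retired, it->second.priorFence);
    for (CmdBufId cb : it->second.cmdBuffers)
        retired.push_back(cb); // caller releases the resources these CBs hold
}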
-void decrementResources(layer_data* my_data, VkQueue queue) {
+void decrementResources(layer_data *my_data, VkQueue queue) {
auto queue_data = my_data->queueMap.find(queue);
if (queue_data != my_data->queueMap.end()) {
for (auto cmdBuffer : queue_data->second.untrackedCmdBuffers) {
@@ -3234,7 +4066,9 @@
}
}
-void trackCommandBuffers(layer_data* my_data, VkQueue queue, uint32_t cmdBufferCount, const VkCommandBuffer* pCmdBuffers, VkFence fence) {
+void trackCommandBuffers(layer_data *my_data, VkQueue queue,
+ uint32_t cmdBufferCount,
+ const VkCommandBuffer *pCmdBuffers, VkFence fence) {
auto queue_data = my_data->queueMap.find(queue);
if (fence != VK_NULL_HANDLE) {
VkFence priorFence = VK_NULL_HANDLE;
@@ -3251,27 +4085,37 @@
my_data->fenceMap[fence].needsSignaled = true;
my_data->fenceMap[fence].queue = queue;
for (uint32_t i = 0; i < cmdBufferCount; ++i) {
- for (auto secondaryCmdBuffer : my_data->commandBufferMap[pCmdBuffers[i]]->secondaryCommandBuffers) {
- my_data->fenceMap[fence].cmdBuffers.push_back(secondaryCmdBuffer);
+ for (auto secondaryCmdBuffer :
+ my_data->commandBufferMap[pCmdBuffers[i]]
+ ->secondaryCommandBuffers) {
+ my_data->fenceMap[fence].cmdBuffers.push_back(
+ secondaryCmdBuffer);
}
my_data->fenceMap[fence].cmdBuffers.push_back(pCmdBuffers[i]);
}
} else {
if (queue_data != my_data->queueMap.end()) {
for (uint32_t i = 0; i < cmdBufferCount; ++i) {
- for (auto secondaryCmdBuffer : my_data->commandBufferMap[pCmdBuffers[i]]->secondaryCommandBuffers) {
- queue_data->second.untrackedCmdBuffers.push_back(secondaryCmdBuffer);
+ for (auto secondaryCmdBuffer :
+ my_data->commandBufferMap[pCmdBuffers[i]]
+ ->secondaryCommandBuffers) {
+ queue_data->second.untrackedCmdBuffers.push_back(
+ secondaryCmdBuffer);
}
- queue_data->second.untrackedCmdBuffers.push_back(pCmdBuffers[i]);
+ queue_data->second.untrackedCmdBuffers.push_back(
+ pCmdBuffers[i]);
}
}
}
if (queue_data != my_data->queueMap.end()) {
for (uint32_t i = 0; i < cmdBufferCount; ++i) {
// Add cmdBuffers to both the global set and queue set
- for (auto secondaryCmdBuffer : my_data->commandBufferMap[pCmdBuffers[i]]->secondaryCommandBuffers) {
+ for (auto secondaryCmdBuffer :
+ my_data->commandBufferMap[pCmdBuffers[i]]
+ ->secondaryCommandBuffers) {
my_data->globalInFlightCmdBuffers.insert(secondaryCmdBuffer);
- queue_data->second.inFlightCmdBuffers.insert(secondaryCmdBuffer);
+ queue_data->second.inFlightCmdBuffers.insert(
+ secondaryCmdBuffer);
}
my_data->globalInFlightCmdBuffers.insert(pCmdBuffers[i]);
queue_data->second.inFlightCmdBuffers.insert(pCmdBuffers[i]);
@@ -3279,19 +4123,26 @@
}
}
-static VkBool32 validateCommandBufferState(layer_data* dev_data, GLOBAL_CB_NODE* pCB)
-{
+static VkBool32 validateCommandBufferState(layer_data *dev_data,
+ GLOBAL_CB_NODE *pCB) {
// Track in-use for resources off of primary and any secondary CBs
VkBool32 skipCall = validateAndIncrementResources(dev_data, pCB);
if (!pCB->secondaryCommandBuffers.empty()) {
for (auto secondaryCmdBuffer : pCB->secondaryCommandBuffers) {
- skipCall |= validateAndIncrementResources(dev_data, dev_data->commandBufferMap[secondaryCmdBuffer]);
+ skipCall |= validateAndIncrementResources(
+ dev_data, dev_data->commandBufferMap[secondaryCmdBuffer]);
}
}
- if ((pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) && (pCB->submitCount > 1)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_COMMAND_BUFFER_SINGLE_SUBMIT_VIOLATION, "DS",
- "CB %#" PRIxLEAST64 " was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT set, but has been submitted %#" PRIxLEAST64 " times.",
- (uint64_t)(pCB->commandBuffer), pCB->submitCount);
+ if ((pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) &&
+ (pCB->submitCount > 1)) {
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_COMMAND_BUFFER_SINGLE_SUBMIT_VIOLATION, "DS",
+ "CB %#" PRIxLEAST64
+ " was begun w/ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT "
+ "set, but has been submitted %#" PRIxLEAST64 " times.",
+ (uint64_t)(pCB->commandBuffer), pCB->submitCount);
}
// Validate that cmd buffers have been updated
if (CB_RECORDED != pCB->state) {
@@ -3302,56 +4153,91 @@
for (auto set : pCB->destroyedSets) {
set_string << " " << set;
}
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
- "You are submitting command buffer %#" PRIxLEAST64 " that is invalid because it had the following bound descriptor set(s) destroyed: %s", (uint64_t)(pCB->commandBuffer), set_string.str().c_str());
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)(pCB->commandBuffer), __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
+ "You are submitting command buffer %#" PRIxLEAST64
+ " that is invalid because it had the following bound "
+ "descriptor set(s) destroyed: %s",
+ (uint64_t)(pCB->commandBuffer), set_string.str().c_str());
}
if (!pCB->updatedSets.empty()) {
std::stringstream set_string;
for (auto set : pCB->updatedSets) {
set_string << " " << set;
}
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
- "You are submitting command buffer %#" PRIxLEAST64 " that is invalid because it had the following bound descriptor set(s) updated: %s", (uint64_t)(pCB->commandBuffer), set_string.str().c_str());
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)(pCB->commandBuffer), __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
+ "You are submitting command buffer %#" PRIxLEAST64
+ " that is invalid because it had the following bound "
+ "descriptor set(s) updated: %s",
+ (uint64_t)(pCB->commandBuffer), set_string.str().c_str());
}
} else { // Flag error for using CB w/o vkEndCommandBuffer() called
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCB->commandBuffer), __LINE__, DRAWSTATE_NO_END_COMMAND_BUFFER, "DS",
- "You must call vkEndCommandBuffer() on CB %#" PRIxLEAST64 " before this call to vkQueueSubmit()!", (uint64_t)(pCB->commandBuffer));
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)(pCB->commandBuffer), __LINE__,
+ DRAWSTATE_NO_END_COMMAND_BUFFER, "DS",
+ "You must call vkEndCommandBuffer() on CB %#" PRIxLEAST64
+ " before this call to vkQueueSubmit()!",
+ (uint64_t)(pCB->commandBuffer));
loader_platform_thread_unlock_mutex(&globalLock);
return VK_ERROR_VALIDATION_FAILED_EXT;
}
}
- // If USAGE_SIMULTANEOUS_USE_BIT not set then CB cannot already be executing on device
- if (!(pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
- if (dev_data->globalInFlightCmdBuffers.find(pCB->commandBuffer) != dev_data->globalInFlightCmdBuffers.end()) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, "DS",
- "Attempt to simultaneously execute CB %#" PRIxLEAST64 " w/o VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set!", (uint64_t)(pCB->commandBuffer));
+ // If USAGE_SIMULTANEOUS_USE_BIT not set then CB cannot already be executing
+ // on device
+ if (!(pCB->beginInfo.flags &
+ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
+ if (dev_data->globalInFlightCmdBuffers.find(pCB->commandBuffer) !=
+ dev_data->globalInFlightCmdBuffers.end()) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)(pCB->commandBuffer), __LINE__,
+ DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, "DS",
+ "Attempt to simultaneously execute CB %#" PRIxLEAST64
+ " w/o VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set!",
+ (uint64_t)(pCB->commandBuffer));
}
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueueSubmit(VkQueue queue, uint32_t submitCount,
+ const VkSubmitInfo *pSubmits, VkFence fence) {
VkBool32 skipCall = VK_FALSE;
- GLOBAL_CB_NODE* pCB = NULL;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+ GLOBAL_CB_NODE *pCB = NULL;
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
for (uint32_t submit_idx = 0; submit_idx < submitCount; submit_idx++) {
const VkSubmitInfo *submit = &pSubmits[submit_idx];
- for (uint32_t i=0; i < submit->waitSemaphoreCount; ++i) {
+ for (uint32_t i = 0; i < submit->waitSemaphoreCount; ++i) {
if (dev_data->semaphoreSignaledMap[submit->pWaitSemaphores[i]]) {
dev_data->semaphoreSignaledMap[submit->pWaitSemaphores[i]] = 0;
} else {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_QUEUE_FORWARD_PROGRESS, "DS",
- "Queue %#" PRIx64 " is waiting on semaphore %#" PRIx64 " that has no way to be signaled.",
- (uint64_t)(queue), (uint64_t)(submit->pWaitSemaphores[i]));
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_QUEUE_FORWARD_PROGRESS, "DS",
+ "Queue %#" PRIx64 " is waiting on semaphore %#" PRIx64
+ " that has no way to be signaled.",
+ (uint64_t)(queue), (uint64_t)(submit->pWaitSemaphores[i]));
}
}
- for (uint32_t i=0; i < submit->signalSemaphoreCount; ++i) {
+ for (uint32_t i = 0; i < submit->signalSemaphoreCount; ++i) {
dev_data->semaphoreSignaledMap[submit->pSignalSemaphores[i]] = 1;
}
- for (uint32_t i=0; i < submit->commandBufferCount; i++) {
+ for (uint32_t i = 0; i < submit->commandBufferCount; i++) {
#ifndef DISABLE_IMAGE_LAYOUT_VALIDATION
- skipCall |= ValidateCmdBufImageLayouts(submit->pCommandBuffers[i]);
+ skipCall |= ValidateCmdBufImageLayouts(submit->pCommandBuffers[i]);
#endif // DISABLE_IMAGE_LAYOUT_VALIDATION
pCB = getCBNode(dev_data, submit->pCommandBuffers[i]);
@@ -3360,23 +4246,32 @@
skipCall |= validateCommandBufferState(dev_data, pCB);
loader_platform_thread_unlock_mutex(&globalLock);
}
- trackCommandBuffers(dev_data, queue, submit->commandBufferCount, submit->pCommandBuffers, fence);
+ trackCommandBuffers(dev_data, queue, submit->commandBufferCount,
+ submit->pCommandBuffers, fence);
}
if (VK_FALSE == skipCall)
- return dev_data->device_dispatch_table->QueueSubmit(queue, submitCount, pSubmits, fence);
+ return dev_data->device_dispatch_table->QueueSubmit(queue, submitCount,
+ pSubmits, fence);
return VK_ERROR_VALIDATION_FAILED_EXT;
}
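The semaphore checks in vkQueueSubmit above keep one pending-signal flag per semaphore: a wait consumes a previously promised signal, and waiting on a semaphore nothing has promised to signal is reported as a forward-progress hazard. A rough standalone model, with integer IDs instead of VkSemaphore:

#include <cstdio>
#include <unordered_map>
#include <vector>

using SemaphoreId = unsigned long long; // stand-in for VkSemaphore

static bool sketch_check_submit(
    std::unordered_map<SemaphoreId, bool> &pendingSignal,
    const std::vector<SemaphoreId> &waits,
    const std::vector<SemaphoreId> &signals) {
    bool ok = true;
    for (SemaphoreId s : waits) {
        if (pendingSignal[s]) {
            pendingSignal[s] = false; // consume the promised signal
        } else {
            std::printf("wait on semaphore %llu that nothing will signal\n", s);
            ok = false;
        }
    }
    for (SemaphoreId s : signals)
        pendingSignal[s] = true; // later submissions may wait on these
    return ok;
}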
-VkBool32 cleanInFlightCmdBuffer(layer_data* my_data, VkCommandBuffer cmdBuffer) {
+VkBool32 cleanInFlightCmdBuffer(layer_data *my_data,
+ VkCommandBuffer cmdBuffer) {
VkBool32 skip_call = VK_FALSE;
- GLOBAL_CB_NODE* pCB = getCBNode(my_data, cmdBuffer);
+ GLOBAL_CB_NODE *pCB = getCBNode(my_data, cmdBuffer);
if (pCB) {
for (auto queryEventsPair : pCB->waitedEventsBeforeQueryReset) {
for (auto event : queryEventsPair.second) {
if (my_data->eventMap[event].needsSignaled) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, 0, DRAWSTATE_INVALID_QUERY, "DS",
- "Cannot get query results on queryPool %" PRIu64 " with index %d which was guarded by unsignaled event %" PRIu64 ".",
- (uint64_t)(queryEventsPair.first.pool), queryEventsPair.first.index, (uint64_t)(event));
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, 0,
+ DRAWSTATE_INVALID_QUERY, "DS",
+ "Cannot get query results on queryPool %" PRIu64
+ " with index %d which was guarded by unsignaled event "
+ "%" PRIu64 ".",
+ (uint64_t)(queryEventsPair.first.pool),
+ queryEventsPair.first.index, (uint64_t)(event));
}
}
}
@@ -3385,16 +4280,21 @@
}
// Remove given cmd_buffer from the global inFlight set.
// Also, if given queue is valid, then remove the cmd_buffer from that queues
-// inFlightCmdBuffer set. Finally, check all other queues and if given cmd_buffer
+// inFlightCmdBuffer set. Finally, check all other queues and if given
+// cmd_buffer
// is still in flight on another queue, add it back into the global set.
-static inline void removeInFlightCmdBuffer(layer_data* dev_data, VkCommandBuffer cmd_buffer, VkQueue queue)
-{
- // Pull it off of global list initially, but if we find it in any other queue list, add it back in
+static inline void removeInFlightCmdBuffer(layer_data *dev_data,
+ VkCommandBuffer cmd_buffer,
+ VkQueue queue) {
+ // Pull it off of global list initially, but if we find it in any other
+ // queue list, add it back in
dev_data->globalInFlightCmdBuffers.erase(cmd_buffer);
if (dev_data->queueMap.find(queue) != dev_data->queueMap.end()) {
dev_data->queueMap[queue].inFlightCmdBuffers.erase(cmd_buffer);
for (auto q : dev_data->queues) {
- if ((q != queue) && (dev_data->queueMap[q].inFlightCmdBuffers.find(cmd_buffer) != dev_data->queueMap[q].inFlightCmdBuffers.end())) {
+ if ((q != queue) &&
+ (dev_data->queueMap[q].inFlightCmdBuffers.find(cmd_buffer) !=
+ dev_data->queueMap[q].inFlightCmdBuffers.end())) {
dev_data->globalInFlightCmdBuffers.insert(cmd_buffer);
break;
}
@@ -3402,37 +4302,47 @@
}
}
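removeInFlightCmdBuffer above keeps the global in-flight set consistent with the per-queue sets: a command buffer leaves the global set only when no queue still has it in flight. A self-contained sketch of that bookkeeping with placeholder integer handles:

#include <map>
#include <set>

using CmdBufId = int; // stand-in for VkCommandBuffer
using QueueId = int;  // stand-in for VkQueue

struct InFlightState {
    std::set<CmdBufId> global;                      // in flight anywhere
    std::map<QueueId, std::set<CmdBufId>> perQueue; // in flight per queue
};

static void sketch_retire(InFlightState &state, QueueId queue, CmdBufId cb) {
    state.global.erase(cb);
    auto q = state.perQueue.find(queue);
    if (q == state.perQueue.end())
        return;
    q->second.erase(cb);
    for (const auto &other : state.perQueue) {
        if (other.first != queue && other.second.count(cb)) {
            state.global.insert(cb); // still pending on another queue
            break;
        }
    }
}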
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkWaitForFences(VkDevice device, uint32_t fenceCount,
+ const VkFence *pFences, VkBool32 waitAll,
+ uint64_t timeout) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->WaitForFences(
+ device, fenceCount, pFences, waitAll, timeout);
VkBool32 skip_call = VK_FALSE;
if (result == VK_SUCCESS) {
- // When we know that all fences are complete we can clean/remove their CBs
+ // When we know that all fences are complete we can clean/remove their
+ // CBs
if (waitAll || fenceCount == 1) {
for (uint32_t i = 0; i < fenceCount; ++i) {
VkQueue fence_queue = dev_data->fenceMap[pFences[i]].queue;
- for (auto cmdBuffer : dev_data->fenceMap[pFences[i]].cmdBuffers) {
+ for (auto cmdBuffer :
+ dev_data->fenceMap[pFences[i]].cmdBuffers) {
skip_call |= cleanInFlightCmdBuffer(dev_data, cmdBuffer);
removeInFlightCmdBuffer(dev_data, cmdBuffer, fence_queue);
}
}
decrementResources(dev_data, fenceCount, pFences);
}
- // NOTE : Alternate case not handled here is when some fences have completed. In
- // this case for app to guarantee which fences completed it will have to call
- // vkGetFenceStatus() at which point we'll clean/remove their CBs if complete.
+ // NOTE : Alternate case not handled here is when some fences have
+ // completed. In
+ // this case for app to guarantee which fences completed it will have
+ // to call
+ // vkGetFenceStatus() at which point we'll clean/remove their CBs if
+ // complete.
}
if (VK_FALSE != skip_call)
return VK_ERROR_VALIDATION_FAILED_EXT;
return result;
}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->GetFenceStatus(device, fence);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetFenceStatus(VkDevice device, VkFence fence) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result =
+ dev_data->device_dispatch_table->GetFenceStatus(device, fence);
VkBool32 skip_call = VK_FALSE;
if (result == VK_SUCCESS) {
auto fence_queue = dev_data->fenceMap[fence].queue;
@@ -3447,17 +4357,20 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- dev_data->device_dispatch_table->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex,
+ uint32_t queueIndex, VkQueue *pQueue) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ dev_data->device_dispatch_table->GetDeviceQueue(device, queueFamilyIndex,
+ queueIndex, pQueue);
dev_data->queues.push_back(*pQueue);
dev_data->queueMap[*pQueue].device = device;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
decrementResources(dev_data, queue);
VkBool32 skip_call = VK_FALSE;
// Iterate over local set since we erase set members as we go in for loop
@@ -3472,14 +4385,16 @@
return dev_data->device_dispatch_table->QueueWaitIdle(queue);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkDeviceWaitIdle(VkDevice device) {
VkBool32 skip_call = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
for (auto queue : dev_data->queues) {
decrementResources(dev_data, queue);
if (dev_data->queueMap.find(queue) != dev_data->queueMap.end()) {
- // Clear all of the queue inFlightCmdBuffers (global set cleared below)
+ // Clear all of the queue inFlightCmdBuffers (global set cleared
+ // below)
dev_data->queueMap[queue].inFlightCmdBuffers.clear();
}
}
@@ -3492,37 +4407,51 @@
return dev_data->device_dispatch_table->DeviceWaitIdle(device);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyFence(device, fence, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyFence(VkDevice device, VkFence fence,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyFence(device, fence, pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- dev_data->device_dispatch_table->DestroySemaphore(device, semaphore, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroySemaphore(VkDevice device, VkSemaphore semaphore,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ dev_data->device_dispatch_table->DestroySemaphore(device, semaphore,
+ pAllocator);
dev_data->semaphoreSignaledMap.erase(semaphore);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyEvent(device, event, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyEvent(VkDevice device, VkEvent event,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyEvent(device, event, pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyQueryPool(device, queryPool, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyQueryPool(device, queryPool,
+ pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount,
- size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool,
+ uint32_t firstQuery, uint32_t queryCount,
+ size_t dataSize, void *pData, VkDeviceSize stride,
+ VkQueryResultFlags flags) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
unordered_map<QueryObject, vector<VkCommandBuffer>> queriesInFlight;
- GLOBAL_CB_NODE* pCB = nullptr;
+ GLOBAL_CB_NODE *pCB = nullptr;
for (auto cmdBuffer : dev_data->globalInFlightCmdBuffers) {
pCB = getCBNode(dev_data, cmdBuffer);
for (auto queryStatePair : pCB->queryToStateMap) {
@@ -3537,134 +4466,202 @@
if (queryToStateElement != dev_data->queryToStateMap.end()) {
}
// Available and in flight
- if(queryElement != queriesInFlight.end() && queryToStateElement != dev_data->queryToStateMap.end() && queryToStateElement->second) {
+ if (queryElement != queriesInFlight.end() &&
+ queryToStateElement != dev_data->queryToStateMap.end() &&
+ queryToStateElement->second) {
for (auto cmdBuffer : queryElement->second) {
pCB = getCBNode(dev_data, cmdBuffer);
- auto queryEventElement = pCB->waitedEventsBeforeQueryReset.find(query);
- if (queryEventElement == pCB->waitedEventsBeforeQueryReset.end()) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__,
- DRAWSTATE_INVALID_QUERY, "DS", "Cannot get query results on queryPool %" PRIu64 " with index %d which is in flight.",
- (uint64_t)(queryPool), firstQuery + i);
+ auto queryEventElement =
+ pCB->waitedEventsBeforeQueryReset.find(query);
+ if (queryEventElement ==
+ pCB->waitedEventsBeforeQueryReset.end()) {
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_QUERY, "DS",
+ "Cannot get query results on queryPool %" PRIu64
+ " with index %d which is in flight.",
+ (uint64_t)(queryPool), firstQuery + i);
} else {
for (auto event : queryEventElement->second) {
dev_data->eventMap[event].needsSignaled = true;
}
}
}
- // Unavailable and in flight
- } else if (queryElement != queriesInFlight.end() && queryToStateElement != dev_data->queryToStateMap.end() && !queryToStateElement->second) {
- // TODO : Can there be the same query in use by multiple command buffers in flight?
+ // Unavailable and in flight
+ } else if (queryElement != queriesInFlight.end() &&
+ queryToStateElement != dev_data->queryToStateMap.end() &&
+ !queryToStateElement->second) {
+ // TODO : Can there be the same query in use by multiple command
+ // buffers in flight?
bool make_available = false;
for (auto cmdBuffer : queryElement->second) {
pCB = getCBNode(dev_data, cmdBuffer);
make_available |= pCB->queryToStateMap[query];
}
- if (!(((flags & VK_QUERY_RESULT_PARTIAL_BIT) || (flags & VK_QUERY_RESULT_WAIT_BIT)) && make_available)) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__, DRAWSTATE_INVALID_QUERY, "DS",
- "Cannot get query results on queryPool %" PRIu64 " with index %d which is unavailable.",
- (uint64_t)(queryPool), firstQuery + i);
+ if (!(((flags & VK_QUERY_RESULT_PARTIAL_BIT) ||
+ (flags & VK_QUERY_RESULT_WAIT_BIT)) &&
+ make_available)) {
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_QUERY, "DS",
+ "Cannot get query results on queryPool %" PRIu64
+ " with index %d which is unavailable.",
+ (uint64_t)(queryPool), firstQuery + i);
}
- // Unavailable
- } else if (queryToStateElement != dev_data->queryToStateMap.end() && !queryToStateElement->second) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__, DRAWSTATE_INVALID_QUERY, "DS",
- "Cannot get query results on queryPool %" PRIu64 " with index %d which is unavailable.",
- (uint64_t)(queryPool), firstQuery + i);
- // Unitialized
+ // Unavailable
+ } else if (queryToStateElement != dev_data->queryToStateMap.end() &&
+ !queryToStateElement->second) {
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_QUERY, "DS",
+ "Cannot get query results on queryPool %" PRIu64
+ " with index %d which is unavailable.",
+ (uint64_t)(queryPool), firstQuery + i);
+ // Uninitialized
} else if (queryToStateElement == dev_data->queryToStateMap.end()) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__, DRAWSTATE_INVALID_QUERY, "DS",
- "Cannot get query results on queryPool %" PRIu64 " with index %d which is uninitialized.",
- (uint64_t)(queryPool), firstQuery + i);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_QUERY, "DS",
+ "Cannot get query results on queryPool %" PRIu64
+ " with index %d which is uninitialized.",
+ (uint64_t)(queryPool), firstQuery + i);
}
}
if (skip_call)
return VK_ERROR_VALIDATION_FAILED_EXT;
- return dev_data->device_dispatch_table->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+ return dev_data->device_dispatch_table->GetQueryPoolResults(
+ device, queryPool, firstQuery, queryCount, dataSize, pData, stride,
+ flags);
}
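The branches added to vkGetQueryPoolResults above boil down to a small decision table. The sketch below compresses it to one state flag per query plus an in-flight bit, which drops the per-event reset tracking but keeps the shape of the checks; the simplification and the names are illustrative only.

enum class QueryState { Uninitialized, Unavailable, Available };

// Returns true when fetching results is acceptable, false when the layer
// would raise a DRAWSTATE_INVALID_QUERY style complaint.
static bool sketch_query_result_ok(QueryState state, bool inFlight,
                                   bool waitOrPartialRequested) {
    switch (state) {
    case QueryState::Available:
        return true; // results exist (the real layer still audits in-flight resets)
    case QueryState::Unavailable:
        // Acceptable only if an in-flight CB will complete it and the caller
        // asked for VK_QUERY_RESULT_WAIT_BIT or VK_QUERY_RESULT_PARTIAL_BIT.
        return inFlight && waitOrPartialRequested;
    case QueryState::Uninitialized:
    default:
        return false; // the query was never written
    }
}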
-VkBool32 validateIdleBuffer(const layer_data* my_data, VkBuffer buffer) {
+VkBool32 validateIdleBuffer(const layer_data *my_data, VkBuffer buffer) {
VkBool32 skip_call = VK_FALSE;
auto buffer_data = my_data->bufferMap.find(buffer);
if (buffer_data == my_data->bufferMap.end()) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, (uint64_t)(buffer), __LINE__, DRAWSTATE_DOUBLE_DESTROY, "DS",
- "Cannot free buffer %" PRIxLEAST64 " that has not been allocated.", (uint64_t)(buffer));
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, (uint64_t)(buffer),
+ __LINE__, DRAWSTATE_DOUBLE_DESTROY, "DS",
+ "Cannot free buffer %" PRIxLEAST64 " that has not been allocated.",
+ (uint64_t)(buffer));
} else {
if (buffer_data->second.in_use.load()) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, (uint64_t)(buffer), __LINE__, DRAWSTATE_OBJECT_INUSE, "DS",
- "Cannot free buffer %" PRIxLEAST64 " that is in use by a command buffer.", (uint64_t)(buffer));
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ (uint64_t)(buffer), __LINE__, DRAWSTATE_OBJECT_INUSE,
+ "DS", "Cannot free buffer %" PRIxLEAST64
+ " that is in use by a command buffer.",
+ (uint64_t)(buffer));
}
}
return skip_call;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyBuffer(VkDevice device, VkBuffer buffer,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skip_call = VK_FALSE;
if (!validateIdleBuffer(dev_data, buffer)) {
- dev_data->device_dispatch_table->DestroyBuffer(device, buffer, pAllocator);
+ dev_data->device_dispatch_table->DestroyBuffer(device, buffer,
+ pAllocator);
}
dev_data->bufferMap.erase(buffer);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- dev_data->device_dispatch_table->DestroyBufferView(device, bufferView, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyBufferView(VkDevice device, VkBufferView bufferView,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ dev_data->device_dispatch_table->DestroyBufferView(device, bufferView,
+ pAllocator);
dev_data->bufferViewMap.erase(bufferView);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyImage(VkDevice device, VkImage image,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
dev_data->device_dispatch_table->DestroyImage(device, image, pAllocator);
dev_data->imageMap.erase(image);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyImageView(device, imageView, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyImageView(VkDevice device, VkImageView imageView,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyImageView(device, imageView,
+ pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyShaderModule(device, shaderModule, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyShaderModule(device, shaderModule,
+ pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyPipeline(device, pipeline, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyPipeline(VkDevice device, VkPipeline pipeline,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyPipeline(device, pipeline, pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyPipelineLayout(device, pipelineLayout,
+ pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroySampler(device, sampler, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroySampler(VkDevice device, VkSampler sampler,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroySampler(device, sampler, pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDescriptorSetLayout(VkDevice device,
+ VkDescriptorSetLayout descriptorSetLayout,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyDescriptorSetLayout(
+ device, descriptorSetLayout, pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyDescriptorPool(device, descriptorPool, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyDescriptorPool(device, descriptorPool,
+ pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t count, const VkCommandBuffer *pCommandBuffers)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
+ uint32_t count,
+ const VkCommandBuffer *pCommandBuffers) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
for (uint32_t i = 0; i < count; i++) {
loader_platform_thread_lock_mutex(&globalLock);
@@ -3678,37 +4675,51 @@
}
// Remove commandBuffer reference from commandPoolMap
- dev_data->commandPoolMap[commandPool].commandBuffers.remove(pCommandBuffers[i]);
+ dev_data->commandPoolMap[commandPool].commandBuffers.remove(
+ pCommandBuffers[i]);
loader_platform_thread_unlock_mutex(&globalLock);
}
- dev_data->device_dispatch_table->FreeCommandBuffers(device, commandPool, count, pCommandBuffers);
+ dev_data->device_dispatch_table->FreeCommandBuffers(device, commandPool,
+ count, pCommandBuffers);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateCommandPool(VkDevice device,
+ const VkCommandPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkCommandPool *pCommandPool) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+ VkResult result = dev_data->device_dispatch_table->CreateCommandPool(
+ device, pCreateInfo, pAllocator, pCommandPool);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- dev_data->commandPoolMap[*pCommandPool].createFlags = pCreateInfo->flags;
+ dev_data->commandPoolMap[*pCommandPool].createFlags =
+ pCreateInfo->flags;
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VkBool32 validateCommandBuffersNotInUse(const layer_data* dev_data, VkCommandPool commandPool) {
+VkBool32 validateCommandBuffersNotInUse(const layer_data *dev_data,
+ VkCommandPool commandPool) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
auto pool_data = dev_data->commandPoolMap.find(commandPool);
if (pool_data != dev_data->commandPoolMap.end()) {
for (auto cmdBuffer : pool_data->second.commandBuffers) {
if (dev_data->globalInFlightCmdBuffers.count(cmdBuffer)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, (uint64_t)(commandPool),
- __LINE__, DRAWSTATE_OBJECT_INUSE, "DS", "Cannot reset command pool %" PRIx64 " when allocated command buffer %" PRIx64 " is in use.",
- (uint64_t)(commandPool), (uint64_t)(cmdBuffer));
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+ (uint64_t)(commandPool), __LINE__, DRAWSTATE_OBJECT_INUSE,
+ "DS",
+ "Cannot reset command pool %" PRIx64
+ " when allocated command buffer %" PRIx64 " is in use.",
+ (uint64_t)(commandPool), (uint64_t)(cmdBuffer));
}
}
}
@@ -3716,19 +4727,29 @@
return skipCall;
}
-// Destroy commandPool along with all of the commandBuffers allocated from that pool
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+// Destroy commandPool along with all of the commandBuffers allocated from that
+// pool
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
loader_platform_thread_lock_mutex(&globalLock);
- // Must remove cmdpool from cmdpoolmap, after removing all cmdbuffers in its list from the commandPoolMap
- if (dev_data->commandPoolMap.find(commandPool) != dev_data->commandPoolMap.end()) {
- for (auto poolCb = dev_data->commandPoolMap[commandPool].commandBuffers.begin(); poolCb != dev_data->commandPoolMap[commandPool].commandBuffers.end();) {
+ // Must remove cmdpool from cmdpoolmap, after removing all cmdbuffers in its
+ // list from the commandPoolMap
+ if (dev_data->commandPoolMap.find(commandPool) !=
+ dev_data->commandPoolMap.end()) {
+ for (auto poolCb =
+ dev_data->commandPoolMap[commandPool].commandBuffers.begin();
+ poolCb !=
+ dev_data->commandPoolMap[commandPool].commandBuffers.end();) {
auto del_cb = dev_data->commandBufferMap.find(*poolCb);
- delete (*del_cb).second; // delete CB info structure
- dev_data->commandBufferMap.erase(del_cb); // Remove this command buffer from cbMap
- poolCb = dev_data->commandPoolMap[commandPool].commandBuffers.erase(poolCb); // Remove CB reference from commandPoolMap's list
+ delete (*del_cb).second; // delete CB info structure
+ dev_data->commandBufferMap.erase(
+ del_cb); // Remove this command buffer from cbMap
+ poolCb = dev_data->commandPoolMap[commandPool].commandBuffers.erase(
+ poolCb); // Remove CB reference from commandPoolMap's list
}
}
dev_data->commandPoolMap.erase(commandPool);
@@ -3738,26 +4759,28 @@
if (VK_TRUE == validateCommandBuffersNotInUse(dev_data, commandPool))
return;
- dev_data->device_dispatch_table->DestroyCommandPool(device, commandPool, pAllocator);
+ dev_data->device_dispatch_table->DestroyCommandPool(device, commandPool,
+ pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags)
-{
- layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetCommandPool(VkDevice device, VkCommandPool commandPool,
+ VkCommandPoolResetFlags flags) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
if (VK_TRUE == validateCommandBuffersNotInUse(dev_data, commandPool))
return VK_ERROR_VALIDATION_FAILED_EXT;
- result = dev_data->device_dispatch_table->ResetCommandPool(device, commandPool, flags);
+ result = dev_data->device_dispatch_table->ResetCommandPool(
+ device, commandPool, flags);
// Reset all of the CBs allocated from this pool
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
auto it = dev_data->commandPoolMap[commandPool].commandBuffers.begin();
- while (it != dev_data->commandPoolMap[commandPool].commandBuffers.end()) {
+ while (it !=
+ dev_data->commandPoolMap[commandPool].commandBuffers.end()) {
resetCB(dev_data, (*it));
++it;
}
@@ -3766,138 +4789,167 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyFramebuffer(device, framebuffer, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyFramebuffer(device, framebuffer,
+ pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator)
-{
- get_my_data_ptr(get_dispatch_key(device), layer_data_map)->device_dispatch_table->DestroyRenderPass(device, renderPass, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
+ const VkAllocationCallbacks *pAllocator) {
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map)
+ ->device_dispatch_table->DestroyRenderPass(device, renderPass,
+ pAllocator);
// TODO : Clean up any internal data structures using this obj.
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateBuffer(
+ device, pCreateInfo, pAllocator, pBuffer);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- // TODO : This doesn't create deep copy of pQueueFamilyIndices so need to fix that if/when we want that data to be valid
- dev_data->bufferMap[*pBuffer].create_info = unique_ptr<VkBufferCreateInfo>(new VkBufferCreateInfo(*pCreateInfo));
+ // TODO : This doesn't create deep copy of pQueueFamilyIndices so need
+ // to fix that if/when we want that data to be valid
+ dev_data->bufferMap[*pBuffer].create_info =
+ unique_ptr<VkBufferCreateInfo>(
+ new VkBufferCreateInfo(*pCreateInfo));
dev_data->bufferMap[*pBuffer].in_use.store(0);
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateBufferView(device, pCreateInfo, pAllocator, pView);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateBufferView(VkDevice device,
+ const VkBufferViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkBufferView *pView) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateBufferView(
+ device, pCreateInfo, pAllocator, pView);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- dev_data->bufferViewMap[*pView] = unique_ptr<VkBufferViewCreateInfo>(new VkBufferViewCreateInfo(*pCreateInfo));
+ dev_data->bufferViewMap[*pView] = unique_ptr<VkBufferViewCreateInfo>(
+ new VkBufferViewCreateInfo(*pCreateInfo));
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateImage(device, pCreateInfo, pAllocator, pImage);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateImage(
+ device, pCreateInfo, pAllocator, pImage);
if (VK_SUCCESS == result) {
- IMAGE_NODE* image_node = new IMAGE_NODE;
+ IMAGE_NODE *image_node = new IMAGE_NODE;
image_node->layout = pCreateInfo->initialLayout;
image_node->format = pCreateInfo->format;
loader_platform_thread_lock_mutex(&globalLock);
- dev_data->imageMap[*pImage] = unique_ptr<VkImageCreateInfo>(new VkImageCreateInfo(*pCreateInfo));
+ dev_data->imageMap[*pImage] =
+ unique_ptr<VkImageCreateInfo>(new VkImageCreateInfo(*pCreateInfo));
dev_data->imageLayoutMap[*pImage] = image_node;
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateImageView(device, pCreateInfo, pAllocator, pView);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkImageView *pView) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateImageView(
+ device, pCreateInfo, pAllocator, pView);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- dev_data->imageViewMap[*pView] = unique_ptr<VkImageViewCreateInfo>(new VkImageViewCreateInfo(*pCreateInfo));
+ dev_data->imageViewMap[*pView] = unique_ptr<VkImageViewCreateInfo>(
+ new VkImageViewCreateInfo(*pCreateInfo));
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-//TODO handle pipeline caches
-VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+// TODO handle pipeline caches
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreatePipelineCache(VkDevice device,
+ const VkPipelineCacheCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipelineCache *pPipelineCache) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreatePipelineCache(
+ device, pCreateInfo, pAllocator, pPipelineCache);
return result;
}
-VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(
- VkDevice device,
- VkPipelineCache pipelineCache,
- const VkAllocationCallbacks* pAllocator)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- dev_data->device_dispatch_table->DestroyPipelineCache(device, pipelineCache, pAllocator);
+VKAPI_ATTR void VKAPI_CALL
+ vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ dev_data->device_dispatch_table->DestroyPipelineCache(device, pipelineCache,
+ pAllocator);
}
-VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache,
+ size_t *pDataSize, void *pData) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->GetPipelineCacheData(
+ device, pipelineCache, pDataSize, pData);
return result;
}
-VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache,
+ uint32_t srcCacheCount,
+ const VkPipelineCache *pSrcCaches) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->MergePipelineCaches(
+ device, dstCache, srcCacheCount, pSrcCaches);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t count,
- const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator,
- VkPipeline *pPipelines)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache,
+ uint32_t count,
+ const VkGraphicsPipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipeline *pPipelines) {
VkResult result = VK_SUCCESS;
- //TODO What to do with pipelineCache?
+ // TODO What to do with pipelineCache?
// The order of operations here is a little convoluted but gets the job done
// 1. Pipeline create state is first shadowed into PIPELINE_NODE struct
- // 2. Create state is then validated (which uses flags setup during shadowing)
- // 3. If everything looks good, we'll then create the pipeline and add NODE to pipelineMap
+ // 2. Create state is then validated (which uses flags setup during
+ // shadowing)
+ // 3. If everything looks good, we'll then create the pipeline and add NODE
+ // to pipelineMap
VkBool32 skipCall = VK_FALSE;
- // TODO : Improve this data struct w/ unique_ptrs so cleanup below is automatic
- vector<PIPELINE_NODE*> pPipeNode(count);
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ // TODO : Improve this data struct w/ unique_ptrs so cleanup below is
+ // automatic
+ vector<PIPELINE_NODE *> pPipeNode(count);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- uint32_t i=0;
+ uint32_t i = 0;
loader_platform_thread_lock_mutex(&globalLock);
- for (i=0; i<count; i++) {
+ for (i = 0; i < count; i++) {
pPipeNode[i] = initGraphicsPipeline(dev_data, &pCreateInfos[i], NULL);
skipCall |= verifyPipelineCreateState(dev_data, device, pPipeNode[i]);
}
@@ -3905,16 +4957,16 @@
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = dev_data->device_dispatch_table->CreateGraphicsPipelines(device,
- pipelineCache, count, pCreateInfos, pAllocator, pPipelines);
+ result = dev_data->device_dispatch_table->CreateGraphicsPipelines(
+ device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines);
loader_platform_thread_lock_mutex(&globalLock);
- for (i=0; i<count; i++) {
+ for (i = 0; i < count; i++) {
pPipeNode[i]->pipeline = pPipelines[i];
dev_data->pipelineMap[pPipeNode[i]->pipeline] = pPipeNode[i];
}
loader_platform_thread_unlock_mutex(&globalLock);
} else {
- for (i=0; i<count; i++) {
+ for (i = 0; i < count; i++) {
if (pPipeNode[i]) {
// If we allocated a pipeNode, need to clean it up here
delete[] pPipeNode[i]->pVertexBindingDescriptions;
@@ -3928,45 +4980,48 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t count,
- const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator,
- VkPipeline *pPipelines)
-{
- VkResult result = VK_SUCCESS;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache,
+ uint32_t count,
+ const VkComputePipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipeline *pPipelines) {
+ VkResult result = VK_SUCCESS;
VkBool32 skipCall = VK_FALSE;
- // TODO : Improve this data struct w/ unique_ptrs so cleanup below is automatic
- vector<PIPELINE_NODE*> pPipeNode(count);
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ // TODO : Improve this data struct w/ unique_ptrs so cleanup below is
+ // automatic
+ vector<PIPELINE_NODE *> pPipeNode(count);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- uint32_t i=0;
+ uint32_t i = 0;
loader_platform_thread_lock_mutex(&globalLock);
- for (i=0; i<count; i++) {
+ for (i = 0; i < count; i++) {
// TODO: Verify compute stage bits
// Create and initialize internal tracking data structure
pPipeNode[i] = new PIPELINE_NODE;
- memcpy(&pPipeNode[i]->computePipelineCI, (const void*)&pCreateInfos[i], sizeof(VkComputePipelineCreateInfo));
+ memcpy(&pPipeNode[i]->computePipelineCI, (const void *)&pCreateInfos[i],
+ sizeof(VkComputePipelineCreateInfo));
// TODO: Add Compute Pipeline Verification
- // skipCall |= verifyPipelineCreateState(dev_data, device, pPipeNode[i]);
+ // skipCall |= verifyPipelineCreateState(dev_data, device,
+ // pPipeNode[i]);
}
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = dev_data->device_dispatch_table->CreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines);
+ result = dev_data->device_dispatch_table->CreateComputePipelines(
+ device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines);
loader_platform_thread_lock_mutex(&globalLock);
- for (i=0; i<count; i++) {
+ for (i = 0; i < count; i++) {
pPipeNode[i]->pipeline = pPipelines[i];
dev_data->pipelineMap[pPipeNode[i]->pipeline] = pPipeNode[i];
}
loader_platform_thread_unlock_mutex(&globalLock);
} else {
- for (i=0; i<count; i++) {
+ for (i = 0; i < count; i++) {
if (pPipeNode[i]) {
// Clean up any locally allocated data structures
delete pPipeNode[i];
@@ -3977,49 +5032,78 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSampler *pSampler) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateSampler(
+ device, pCreateInfo, pAllocator, pSampler);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- dev_data->sampleMap[*pSampler] = unique_ptr<SAMPLER_NODE>(new SAMPLER_NODE(pSampler, pCreateInfo));
+ dev_data->sampleMap[*pSampler] =
+ unique_ptr<SAMPLER_NODE>(new SAMPLER_NODE(pSampler, pCreateInfo));
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(
+ VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorSetLayout *pSetLayout) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result =
+ dev_data->device_dispatch_table->CreateDescriptorSetLayout(
+ device, pCreateInfo, pAllocator, pSetLayout);
if (VK_SUCCESS == result) {
// TODOSC : Capture layout bindings set
- LAYOUT_NODE* pNewNode = new LAYOUT_NODE;
+ LAYOUT_NODE *pNewNode = new LAYOUT_NODE;
if (NULL == pNewNode) {
- if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, (uint64_t) *pSetLayout, __LINE__, DRAWSTATE_OUT_OF_MEMORY, "DS",
- "Out of memory while attempting to allocate LAYOUT_NODE in vkCreateDescriptorSetLayout()"))
+ if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+ (uint64_t)*pSetLayout, __LINE__,
+ DRAWSTATE_OUT_OF_MEMORY, "DS",
+ "Out of memory while attempting to allocate "
+ "LAYOUT_NODE in vkCreateDescriptorSetLayout()"))
return VK_ERROR_VALIDATION_FAILED_EXT;
}
- memcpy((void*)&pNewNode->createInfo, pCreateInfo, sizeof(VkDescriptorSetLayoutCreateInfo));
- pNewNode->createInfo.pBindings = new VkDescriptorSetLayoutBinding[pCreateInfo->bindingCount];
- memcpy((void*)pNewNode->createInfo.pBindings, pCreateInfo->pBindings, sizeof(VkDescriptorSetLayoutBinding)*pCreateInfo->bindingCount);
+ memcpy((void *)&pNewNode->createInfo, pCreateInfo,
+ sizeof(VkDescriptorSetLayoutCreateInfo));
+ pNewNode->createInfo.pBindings =
+ new VkDescriptorSetLayoutBinding[pCreateInfo->bindingCount];
+ memcpy((void *)pNewNode->createInfo.pBindings, pCreateInfo->pBindings,
+ sizeof(VkDescriptorSetLayoutBinding) *
+ pCreateInfo->bindingCount);
// g++ does not like reserve with size 0
if (pCreateInfo->bindingCount)
pNewNode->bindings.reserve(pCreateInfo->bindingCount);
uint32_t totalCount = 0;
- for (uint32_t i=0; i<pCreateInfo->bindingCount; i++) {
- if (!pNewNode->bindings.insert(pCreateInfo->pBindings[i].binding).second) {
- if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, (uint64_t) *pSetLayout, __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS",
- "duplicated binding number in VkDescriptorSetLayoutBinding"))
+ for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
+ if (!pNewNode->bindings.insert(pCreateInfo->pBindings[i].binding)
+ .second) {
+ if (log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+ (uint64_t)*pSetLayout, __LINE__,
+ DRAWSTATE_INVALID_LAYOUT, "DS",
+ "duplicated binding number in "
+ "VkDescriptorSetLayoutBinding"))
return VK_ERROR_VALIDATION_FAILED_EXT;
}
totalCount += pCreateInfo->pBindings[i].descriptorCount;
if (pCreateInfo->pBindings[i].pImmutableSamplers) {
- VkSampler** ppIS = (VkSampler**)&pNewNode->createInfo.pBindings[i].pImmutableSamplers;
- *ppIS = new VkSampler[pCreateInfo->pBindings[i].descriptorCount];
- memcpy(*ppIS, pCreateInfo->pBindings[i].pImmutableSamplers, pCreateInfo->pBindings[i].descriptorCount*sizeof(VkSampler));
+ VkSampler **ppIS =
+ (VkSampler **)&pNewNode->createInfo.pBindings[i]
+ .pImmutableSamplers;
+ *ppIS =
+ new VkSampler[pCreateInfo->pBindings[i].descriptorCount];
+ memcpy(*ppIS, pCreateInfo->pBindings[i].pImmutableSamplers,
+ pCreateInfo->pBindings[i].descriptorCount *
+ sizeof(VkSampler));
}
}
pNewNode->layout = *pSetLayout;
@@ -4030,11 +5114,13 @@
uint32_t offset = 0;
uint32_t j = 0;
VkDescriptorType dType;
- for (uint32_t i=0; i<pCreateInfo->bindingCount; i++) {
+ for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
dType = pCreateInfo->pBindings[i].descriptorType;
- for (j = 0; j < pCreateInfo->pBindings[i].descriptorCount; j++) {
+ for (j = 0; j < pCreateInfo->pBindings[i].descriptorCount;
+ j++) {
pNewNode->descriptorTypes[offset + j] = dType;
- pNewNode->stageFlags[offset + j] = pCreateInfo->pBindings[i].stageFlags;
+ pNewNode->stageFlags[offset + j] =
+ pCreateInfo->pBindings[i].stageFlags;
if ((dType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
(dType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)) {
pNewNode->dynamicDescriptorCount++;
@@ -4054,40 +5140,60 @@
return result;
}
-VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreatePipelineLayout(VkDevice device,
+ const VkPipelineLayoutCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipelineLayout *pPipelineLayout) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreatePipelineLayout(
+ device, pCreateInfo, pAllocator, pPipelineLayout);
if (VK_SUCCESS == result) {
// TODOSC : Merge capture of the setLayouts per pipeline
- PIPELINE_LAYOUT_NODE& plNode = dev_data->pipelineLayoutMap[*pPipelineLayout];
+ PIPELINE_LAYOUT_NODE &plNode =
+ dev_data->pipelineLayoutMap[*pPipelineLayout];
plNode.descriptorSetLayouts.resize(pCreateInfo->setLayoutCount);
uint32_t i = 0;
- for (i=0; i<pCreateInfo->setLayoutCount; ++i) {
+ for (i = 0; i < pCreateInfo->setLayoutCount; ++i) {
plNode.descriptorSetLayouts[i] = pCreateInfo->pSetLayouts[i];
}
plNode.pushConstantRanges.resize(pCreateInfo->pushConstantRangeCount);
- for (i=0; i<pCreateInfo->pushConstantRangeCount; ++i) {
+ for (i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
plNode.pushConstantRanges[i] = pCreateInfo->pPushConstantRanges[i];
}
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDescriptorPool(VkDevice device,
+ const VkDescriptorPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorPool *pDescriptorPool) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateDescriptorPool(
+ device, pCreateInfo, pAllocator, pDescriptorPool);
if (VK_SUCCESS == result) {
// Insert this pool into Global Pool LL at head
- if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, (uint64_t) *pDescriptorPool, __LINE__, DRAWSTATE_OUT_OF_MEMORY, "DS",
- "Created Descriptor Pool %#" PRIxLEAST64, (uint64_t) *pDescriptorPool))
+ if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ (uint64_t)*pDescriptorPool, __LINE__,
+ DRAWSTATE_OUT_OF_MEMORY, "DS",
+ "Created Descriptor Pool %#" PRIxLEAST64,
+ (uint64_t)*pDescriptorPool))
return VK_ERROR_VALIDATION_FAILED_EXT;
loader_platform_thread_lock_mutex(&globalLock);
- DESCRIPTOR_POOL_NODE* pNewNode = new DESCRIPTOR_POOL_NODE(*pDescriptorPool, pCreateInfo);
+ DESCRIPTOR_POOL_NODE *pNewNode =
+ new DESCRIPTOR_POOL_NODE(*pDescriptorPool, pCreateInfo);
if (NULL == pNewNode) {
- if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, (uint64_t) *pDescriptorPool, __LINE__, DRAWSTATE_OUT_OF_MEMORY, "DS",
- "Out of memory while attempting to allocate DESCRIPTOR_POOL_NODE in vkCreateDescriptorPool()"))
+ if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ (uint64_t)*pDescriptorPool, __LINE__,
+ DRAWSTATE_OUT_OF_MEMORY, "DS",
+ "Out of memory while attempting to allocate "
+ "DESCRIPTOR_POOL_NODE in vkCreateDescriptorPool()"))
return VK_ERROR_VALIDATION_FAILED_EXT;
} else {
dev_data->descriptorPoolMap[*pDescriptorPool] = pNewNode;
@@ -4099,59 +5205,100 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->ResetDescriptorPool(device, descriptorPool, flags);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+ VkDescriptorPoolResetFlags flags) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->ResetDescriptorPool(
+ device, descriptorPool, flags);
if (VK_SUCCESS == result) {
clearDescriptorPool(dev_data, device, descriptorPool, flags);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateDescriptorSets(VkDevice device,
+ const VkDescriptorSetAllocateInfo *pAllocateInfo,
+ VkDescriptorSet *pDescriptorSets) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
// Verify that requested descriptorSets are available in pool
- DESCRIPTOR_POOL_NODE *pPoolNode = getPoolNode(dev_data, pAllocateInfo->descriptorPool);
+ DESCRIPTOR_POOL_NODE *pPoolNode =
+ getPoolNode(dev_data, pAllocateInfo->descriptorPool);
if (!pPoolNode) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, (uint64_t) pAllocateInfo->descriptorPool, __LINE__, DRAWSTATE_INVALID_POOL, "DS",
- "Unable to find pool node for pool %#" PRIxLEAST64 " specified in vkAllocateDescriptorSets() call", (uint64_t) pAllocateInfo->descriptorPool);
- } else { // Make sure pool has all the available descriptors before calling down chain
- skipCall |= validate_descriptor_availability_in_pool(dev_data, pPoolNode, pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts);
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ (uint64_t)pAllocateInfo->descriptorPool, __LINE__,
+ DRAWSTATE_INVALID_POOL, "DS",
+ "Unable to find pool node for pool %#" PRIxLEAST64
+ " specified in vkAllocateDescriptorSets() call",
+ (uint64_t)pAllocateInfo->descriptorPool);
+ } else { // Make sure pool has all the available descriptors before calling
+ // down chain
+ skipCall |= validate_descriptor_availability_in_pool(
+ dev_data, pPoolNode, pAllocateInfo->descriptorSetCount,
+ pAllocateInfo->pSetLayouts);
}
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = dev_data->device_dispatch_table->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+ VkResult result = dev_data->device_dispatch_table->AllocateDescriptorSets(
+ device, pAllocateInfo, pDescriptorSets);
if (VK_SUCCESS == result) {
- DESCRIPTOR_POOL_NODE *pPoolNode = getPoolNode(dev_data, pAllocateInfo->descriptorPool);
+ DESCRIPTOR_POOL_NODE *pPoolNode =
+ getPoolNode(dev_data, pAllocateInfo->descriptorPool);
if (pPoolNode) {
if (pAllocateInfo->descriptorSetCount == 0) {
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, pAllocateInfo->descriptorSetCount, __LINE__, DRAWSTATE_NONE, "DS",
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ pAllocateInfo->descriptorSetCount, __LINE__,
+ DRAWSTATE_NONE, "DS",
"AllocateDescriptorSets called with 0 count");
}
for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDescriptorSets[i], __LINE__, DRAWSTATE_NONE, "DS",
- "Created Descriptor Set %#" PRIxLEAST64, (uint64_t) pDescriptorSets[i]);
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDescriptorSets[i], __LINE__, DRAWSTATE_NONE,
+ "DS", "Created Descriptor Set %#" PRIxLEAST64,
+ (uint64_t)pDescriptorSets[i]);
// Create new set node and add to head of pool nodes
- SET_NODE* pNewNode = new SET_NODE;
+ SET_NODE *pNewNode = new SET_NODE;
if (NULL == pNewNode) {
- if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDescriptorSets[i], __LINE__, DRAWSTATE_OUT_OF_MEMORY, "DS",
- "Out of memory while attempting to allocate SET_NODE in vkAllocateDescriptorSets()"))
+ if (log_msg(dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDescriptorSets[i], __LINE__,
+ DRAWSTATE_OUT_OF_MEMORY, "DS",
+ "Out of memory while attempting to allocate "
+ "SET_NODE in vkAllocateDescriptorSets()"))
return VK_ERROR_VALIDATION_FAILED_EXT;
} else {
- // TODO : Pool should store a total count of each type of Descriptor available
- // When descriptors are allocated, decrement the count and validate here
- // that the count doesn't go below 0. One reset/free need to bump count back up.
+ // TODO : Pool should store a total count of each type of
+ // Descriptor available
+ // When descriptors are allocated, decrement the count and
+ // validate here
+                    // that the count doesn't go below 0. On reset/free, the
+                    // count needs to be bumped back up.
// Insert set at head of Set LL for this pool
pNewNode->pNext = pPoolNode->pSets;
pNewNode->in_use.store(0);
pPoolNode->pSets = pNewNode;
- LAYOUT_NODE* pLayout = getLayoutNode(dev_data, pAllocateInfo->pSetLayouts[i]);
+ LAYOUT_NODE *pLayout =
+ getLayoutNode(dev_data, pAllocateInfo->pSetLayouts[i]);
if (NULL == pLayout) {
- if (log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, (uint64_t) pAllocateInfo->pSetLayouts[i], __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS",
- "Unable to find set layout node for layout %#" PRIxLEAST64 " specified in vkAllocateDescriptorSets() call", (uint64_t) pAllocateInfo->pSetLayouts[i]))
+ if (log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+ (uint64_t)pAllocateInfo->pSetLayouts[i],
+ __LINE__, DRAWSTATE_INVALID_LAYOUT, "DS",
+ "Unable to find set layout node for layout "
+ "%#" PRIxLEAST64
+ " specified in vkAllocateDescriptorSets() call",
+ (uint64_t)pAllocateInfo->pSetLayouts[i]))
return VK_ERROR_VALIDATION_FAILED_EXT;
}
pNewNode->pLayout = pLayout;
@@ -4159,8 +5306,10 @@
pNewNode->set = pDescriptorSets[i];
pNewNode->descriptorCount = pLayout->endIndex + 1;
if (pNewNode->descriptorCount) {
- size_t descriptorArraySize = sizeof(GENERIC_HEADER*)*pNewNode->descriptorCount;
- pNewNode->ppDescriptors = new GENERIC_HEADER*[descriptorArraySize];
+ size_t descriptorArraySize = sizeof(GENERIC_HEADER *) *
+ pNewNode->descriptorCount;
+ pNewNode->ppDescriptors =
+ new GENERIC_HEADER *[descriptorArraySize];
memset(pNewNode->ppDescriptors, 0, descriptorArraySize);
}
dev_data->setMap[pDescriptorSets[i]] = pNewNode;
@@ -4171,33 +5320,50 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
+ uint32_t count,
+ const VkDescriptorSet *pDescriptorSets) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
// Make sure that no sets being destroyed are in-flight
- for (uint32_t i=0; i<count; ++i)
- skipCall |= validateIdleDescriptorSet(dev_data, pDescriptorSets[i], "vkFreeDesriptorSets");
+ for (uint32_t i = 0; i < count; ++i)
+ skipCall |= validateIdleDescriptorSet(dev_data, pDescriptorSets[i],
+                                         "vkFreeDescriptorSets");
DESCRIPTOR_POOL_NODE *pPoolNode = getPoolNode(dev_data, descriptorPool);
- if (pPoolNode && !(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT & pPoolNode->createInfo.flags)) {
+ if (pPoolNode &&
+ !(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT &
+ pPoolNode->createInfo.flags)) {
// Can't Free from a NON_FREE pool
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, (uint64_t)device, __LINE__, DRAWSTATE_CANT_FREE_FROM_NON_FREE_POOL, "DS",
- "It is invalid to call vkFreeDescriptorSets() with a pool created without setting VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.");
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, (uint64_t)device,
+ __LINE__, DRAWSTATE_CANT_FREE_FROM_NON_FREE_POOL, "DS",
+ "It is invalid to call vkFreeDescriptorSets() with a pool "
+ "created without setting "
+ "VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.");
}
if (VK_FALSE != skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = dev_data->device_dispatch_table->FreeDescriptorSets(device, descriptorPool, count, pDescriptorSets);
+ VkResult result = dev_data->device_dispatch_table->FreeDescriptorSets(
+ device, descriptorPool, count, pDescriptorSets);
if (VK_SUCCESS == result) {
// For each freed descriptor add it back into the pool as available
- for (uint32_t i=0; i<count; ++i) {
- SET_NODE* pSet = dev_data->setMap[pDescriptorSets[i]]; // getSetNode() without locking
+ for (uint32_t i = 0; i < count; ++i) {
+ SET_NODE *pSet =
+ dev_data->setMap[pDescriptorSets[i]]; // getSetNode() without
+ // locking
invalidateBoundCmdBuffers(dev_data, pSet);
- LAYOUT_NODE* pLayout = pSet->pLayout;
+ LAYOUT_NODE *pLayout = pSet->pLayout;
uint32_t typeIndex = 0, poolSizeCount = 0;
- for (uint32_t j=0; j<pLayout->createInfo.bindingCount; ++j) {
- typeIndex = static_cast<uint32_t>(pLayout->createInfo.pBindings[j].descriptorType);
- poolSizeCount = pLayout->createInfo.pBindings[j].descriptorCount;
- pPoolNode->availableDescriptorTypeCount[typeIndex] += poolSizeCount;
+ for (uint32_t j = 0; j < pLayout->createInfo.bindingCount; ++j) {
+ typeIndex = static_cast<uint32_t>(
+ pLayout->createInfo.pBindings[j].descriptorType);
+ poolSizeCount =
+ pLayout->createInfo.pBindings[j].descriptorCount;
+ pPoolNode->availableDescriptorTypeCount[typeIndex] +=
+ poolSizeCount;
}
}
}
@@ -4205,33 +5371,47 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies)
-{
- // dsUpdate will return VK_TRUE only if a bailout error occurs, so we want to call down tree when update returns VK_FALSE
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- if (!dsUpdate(dev_data, device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies)) {
- dev_data->device_dispatch_table->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet *pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet *pDescriptorCopies) {
+ // dsUpdate will return VK_TRUE only if a bailout error occurs, so we want
+    // to call down the tree when the update returns VK_FALSE
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ if (!dsUpdate(dev_data, device, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies)) {
+ dev_data->device_dispatch_table->UpdateDescriptorSets(
+ device, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo* pCreateInfo, VkCommandBuffer* pCommandBuffer)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->AllocateCommandBuffers(device, pCreateInfo, pCommandBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateCommandBuffers(VkDevice device,
+ const VkCommandBufferAllocateInfo *pCreateInfo,
+ VkCommandBuffer *pCommandBuffer) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->AllocateCommandBuffers(
+ device, pCreateInfo, pCommandBuffer);
if (VK_SUCCESS == result) {
for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
// Validate command pool
- if (dev_data->commandPoolMap.find(pCreateInfo->commandPool) != dev_data->commandPoolMap.end()) {
+ if (dev_data->commandPoolMap.find(pCreateInfo->commandPool) !=
+ dev_data->commandPoolMap.end()) {
loader_platform_thread_lock_mutex(&globalLock);
// Add command buffer to its commandPool map
- dev_data->commandPoolMap[pCreateInfo->commandPool].commandBuffers.push_back(pCommandBuffer[i]);
- GLOBAL_CB_NODE* pCB = new GLOBAL_CB_NODE;
+ dev_data->commandPoolMap[pCreateInfo->commandPool]
+ .commandBuffers.push_back(pCommandBuffer[i]);
+ GLOBAL_CB_NODE *pCB = new GLOBAL_CB_NODE;
// Add command buffer to map
dev_data->commandBufferMap[pCommandBuffer[i]] = pCB;
resetCB(dev_data, pCommandBuffer[i]);
pCB->commandBuffer = pCommandBuffer[i];
- pCB->createInfo = *pCreateInfo;
- pCB->device = device;
+ pCB->createInfo = *pCreateInfo;
+ pCB->device = device;
loader_platform_thread_unlock_mutex(&globalLock);
}
}
@@ -4239,49 +5419,103 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
+ const VkCommandBufferBeginInfo *pBeginInfo) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
// Validate command buffer level
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
if (pCB->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
// Secondary Command Buffer
- // TODO : Add check here from spec "If commandBuffer is a secondary command buffer and either the
- // occlusionQueryEnable member of pBeginInfo is VK_FALSE, or the precise occlusion queries feature
- // is not enabled, the queryFlags member of pBeginInfo must not contain VK_QUERY_CONTROL_PRECISE_BIT"
- const VkCommandBufferInheritanceInfo *pInfo = pBeginInfo->pInheritanceInfo;
- if (pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
- if (!pInfo->renderPass) { // renderpass should NOT be null for an Secondary CB
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__, DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
- "vkBeginCommandBuffer(): Secondary Command Buffers (%p) must specify a valid renderpass parameter.", (void*)commandBuffer);
+ // TODO : Add check here from spec "If commandBuffer is a secondary
+ // command buffer and either the
+ // occlusionQueryEnable member of pBeginInfo is VK_FALSE, or the
+ // precise occlusion queries feature
+ // is not enabled, the queryFlags member of pBeginInfo must not
+ // contain VK_QUERY_CONTROL_PRECISE_BIT"
+ const VkCommandBufferInheritanceInfo *pInfo =
+ pBeginInfo->pInheritanceInfo;
+ if (pBeginInfo->flags &
+ VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
+ if (!pInfo->renderPass) { // renderpass should NOT be null for
+                                          // a Secondary CB
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
+ "vkBeginCommandBuffer(): Secondary Command Buffers "
+ "(%p) must specify a valid renderpass parameter.",
+ (void *)commandBuffer);
}
- if (!pInfo->framebuffer) { // framebuffer may be null for an Secondary CB, but this affects perf
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__, DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
- "vkBeginCommandBuffer(): Secondary Command Buffers (%p) may perform better if a valid framebuffer parameter is specified.", (void*)commandBuffer);
+                if (!pInfo->framebuffer) { // framebuffer may be null for a
+ // Secondary CB, but this affects
+ // perf
+ skipCall |=
+ log_msg(dev_data->report_data,
+ VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
+ "vkBeginCommandBuffer(): Secondary Command "
+ "Buffers (%p) may perform better if a valid "
+ "framebuffer parameter is specified.",
+ (void *)commandBuffer);
} else {
string errorString = "";
- VkRenderPass fbRP = dev_data->frameBufferMap[pInfo->framebuffer]->renderPass;
- if (!verify_renderpass_compatibility(dev_data, fbRP, pInfo->renderPass, errorString)) {
- // renderPass that framebuffer was created with must be compatible with local renderPass
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__, DRAWSTATE_RENDERPASS_INCOMPATIBLE, "DS",
- "vkBeginCommandBuffer(): Secondary Command Buffer (%p) renderPass (%#" PRIxLEAST64 ") is incompatible w/ framebuffer (%#" PRIxLEAST64 ") w/ render pass (%#" PRIxLEAST64 ") due to: %s",
- (void*)commandBuffer, (uint64_t)pInfo->renderPass, (uint64_t)pInfo->framebuffer, (uint64_t)fbRP, errorString.c_str());
+ VkRenderPass fbRP =
+ dev_data->frameBufferMap[pInfo->framebuffer]
+ ->renderPass;
+ if (!verify_renderpass_compatibility(
+ dev_data, fbRP, pInfo->renderPass, errorString)) {
+ // renderPass that framebuffer was created with must be
+ // compatible with local renderPass
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_RENDERPASS_INCOMPATIBLE, "DS",
+ "vkBeginCommandBuffer(): Secondary Command Buffer "
+ "(%p) renderPass (%#" PRIxLEAST64
+ ") is incompatible w/ framebuffer (%#" PRIxLEAST64
+ ") w/ render pass (%#" PRIxLEAST64 ") due to: %s",
+ (void *)commandBuffer, (uint64_t)pInfo->renderPass,
+ (uint64_t)pInfo->framebuffer, (uint64_t)fbRP,
+ errorString.c_str());
}
}
}
}
if (CB_RECORDING == pCB->state) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__, DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
- "vkBeginCommandBuffer(): Cannot call Begin on CB (%#" PRIxLEAST64 ") in the RECORDING state. Must first call vkEndCommandBuffer().", (uint64_t)commandBuffer);
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
+ "vkBeginCommandBuffer(): Cannot call Begin on CB "
+ "(%#" PRIxLEAST64 ") in the RECORDING state. Must "
+ "first call vkEndCommandBuffer().",
+ (uint64_t)commandBuffer);
} else if (CB_RECORDED == pCB->state) {
VkCommandPool cmdPool = pCB->createInfo.commandPool;
- if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & dev_data->commandPoolMap[cmdPool].createFlags)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer,
- __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER_RESET, "DS",
- "Call to vkBeginCommandBuffer() on command buffer (%#" PRIxLEAST64 ") attempts to implicitly reset cmdBuffer created from command pool (%#" PRIxLEAST64 ") that does NOT have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
- (uint64_t) commandBuffer, (uint64_t) cmdPool);
+ if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT &
+ dev_data->commandPoolMap[cmdPool].createFlags)) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER_RESET, "DS",
+ "Call to vkBeginCommandBuffer() on command buffer "
+ "(%#" PRIxLEAST64 ") attempts to implicitly reset "
+ "cmdBuffer created from command pool "
+ "(%#" PRIxLEAST64
+ ") that does NOT have the "
+ "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
+ (uint64_t)commandBuffer, (uint64_t)cmdPool);
}
loader_platform_thread_lock_mutex(&globalLock);
resetCB(dev_data, commandBuffer);
@@ -4297,29 +5531,38 @@
}
loader_platform_thread_unlock_mutex(&globalLock);
} else {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
- "In vkBeginCommandBuffer() and unable to find CommandBuffer Node for CB %p!", (void*)commandBuffer);
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER,
+ "DS", "In vkBeginCommandBuffer() and unable to find CommandBuffer "
+ "Node for CB %p!",
+ (void *)commandBuffer);
}
if (VK_FALSE != skipCall) {
return VK_ERROR_VALIDATION_FAILED_EXT;
}
- VkResult result = dev_data->device_dispatch_table->BeginCommandBuffer(commandBuffer, pBeginInfo);
+ VkResult result = dev_data->device_dispatch_table->BeginCommandBuffer(
+ commandBuffer, pBeginInfo);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
VkBool32 skipCall = VK_FALSE;
VkResult result = VK_SUCCESS;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
if (pCB->state != CB_RECORDING) {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkEndCommandBuffer()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkEndCommandBuffer()");
}
}
if (VK_FALSE == skipCall) {
- result = dev_data->device_dispatch_table->EndCommandBuffer(commandBuffer);
+ result =
+ dev_data->device_dispatch_table->EndCommandBuffer(commandBuffer);
if (VK_SUCCESS == result) {
pCB->state = CB_RECORDED;
// Reset CB status flags
@@ -4332,21 +5575,31 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetCommandBuffer(VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
VkCommandPool cmdPool = pCB->createInfo.commandPool;
- if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT & dev_data->commandPoolMap[cmdPool].createFlags)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t) commandBuffer,
- __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER_RESET, "DS",
- "Attempt to reset command buffer (%#" PRIxLEAST64 ") created from command pool (%#" PRIxLEAST64 ") that does NOT have the VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
- (uint64_t) commandBuffer, (uint64_t) cmdPool);
+ if (!(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT &
+ dev_data->commandPoolMap[cmdPool].createFlags)) {
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER_RESET, "DS",
+ "Attempt to reset command buffer (%#" PRIxLEAST64
+ ") created from command pool (%#" PRIxLEAST64
+ ") that does NOT have the "
+ "VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set.",
+ (uint64_t)commandBuffer, (uint64_t)cmdPool);
}
if (skipCall != VK_FALSE)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = dev_data->device_dispatch_table->ResetCommandBuffer(commandBuffer, flags);
+ VkResult result = dev_data->device_dispatch_table->ResetCommandBuffer(
+ commandBuffer, flags);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
resetCB(dev_data, commandBuffer);
@@ -4355,88 +5608,106 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindPipeline(VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_BINDPIPELINE, "vkCmdBindPipeline()");
- if ((VK_PIPELINE_BIND_POINT_COMPUTE == pipelineBindPoint) && (pCB->activeRenderPass)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, (uint64_t) pipeline,
- __LINE__, DRAWSTATE_INVALID_RENDERPASS_CMD, "DS",
- "Incorrectly binding compute pipeline (%#" PRIxLEAST64 ") during active RenderPass (%#" PRIxLEAST64 ")",
- (uint64_t) pipeline, (uint64_t) pCB->activeRenderPass);
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_BINDPIPELINE, "vkCmdBindPipeline()");
+ if ((VK_PIPELINE_BIND_POINT_COMPUTE == pipelineBindPoint) &&
+ (pCB->activeRenderPass)) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, (uint64_t)pipeline,
+ __LINE__, DRAWSTATE_INVALID_RENDERPASS_CMD, "DS",
+ "Incorrectly binding compute pipeline (%#" PRIxLEAST64
+ ") during active RenderPass (%#" PRIxLEAST64 ")",
+ (uint64_t)pipeline, (uint64_t)pCB->activeRenderPass);
} else if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
- skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdBindPipeline");
+ skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdBindPipeline");
}
- PIPELINE_NODE* pPN = getPipeline(dev_data, pipeline);
+ PIPELINE_NODE *pPN = getPipeline(dev_data, pipeline);
if (pPN) {
pCB->lastBoundPipeline = pipeline;
loader_platform_thread_lock_mutex(&globalLock);
set_cb_pso_status(pCB, pPN);
loader_platform_thread_unlock_mutex(&globalLock);
- skipCall |= validatePipelineState(dev_data, pCB, pipelineBindPoint, pipeline);
+ skipCall |= validatePipelineState(dev_data, pCB, pipelineBindPoint,
+ pipeline);
} else {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
- (uint64_t) pipeline, __LINE__, DRAWSTATE_INVALID_PIPELINE, "DS",
- "Attempt to bind Pipeline %#" PRIxLEAST64 " that doesn't exist!", (uint64_t)(pipeline));
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, (uint64_t)pipeline,
+ __LINE__, DRAWSTATE_INVALID_PIPELINE, "DS",
+ "Attempt to bind Pipeline %#" PRIxLEAST64
+ " that doesn't exist!",
+ (uint64_t)(pipeline));
}
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+ dev_data->device_dispatch_table->CmdBindPipeline(
+ commandBuffer, pipelineBindPoint, pipeline);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(
- VkCommandBuffer commandBuffer,
- uint32_t firstViewport,
- uint32_t viewportCount,
- const VkViewport* pViewports)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+ uint32_t viewportCount, const VkViewport *pViewports) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETVIEWPORTSTATE, "vkCmdSetViewport()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_SETVIEWPORTSTATE, "vkCmdSetViewport()");
loader_platform_thread_lock_mutex(&globalLock);
pCB->status |= CBSTATUS_VIEWPORT_SET;
pCB->viewports.resize(viewportCount);
- memcpy(pCB->viewports.data(), pViewports, viewportCount * sizeof(VkViewport));
+ memcpy(pCB->viewports.data(), pViewports,
+ viewportCount * sizeof(VkViewport));
loader_platform_thread_unlock_mutex(&globalLock);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+ dev_data->device_dispatch_table->CmdSetViewport(
+ commandBuffer, firstViewport, viewportCount, pViewports);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(
- VkCommandBuffer commandBuffer,
- uint32_t firstScissor,
- uint32_t scissorCount,
- const VkRect2D* pScissors)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
+ uint32_t scissorCount, const VkRect2D *pScissors) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETSCISSORSTATE, "vkCmdSetScissor()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_SETSCISSORSTATE, "vkCmdSetScissor()");
loader_platform_thread_lock_mutex(&globalLock);
pCB->status |= CBSTATUS_SCISSOR_SET;
pCB->scissors.resize(scissorCount);
- memcpy(pCB->scissors.data(), pScissors, scissorCount * sizeof(VkRect2D));
+ memcpy(pCB->scissors.data(), pScissors,
+ scissorCount * sizeof(VkRect2D));
loader_platform_thread_unlock_mutex(&globalLock);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+ dev_data->device_dispatch_table->CmdSetScissor(
+ commandBuffer, firstScissor, scissorCount, pScissors);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETLINEWIDTHSTATE, "vkCmdSetLineWidth()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_SETLINEWIDTHSTATE, "vkCmdSetLineWidth()");
/* TODO: Do we still need this lock? */
loader_platform_thread_lock_mutex(&globalLock);
pCB->status |= CBSTATUS_LINE_WIDTH_SET;
@@ -4444,71 +5715,80 @@
loader_platform_thread_unlock_mutex(&globalLock);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetLineWidth(commandBuffer, lineWidth);
+ dev_data->device_dispatch_table->CmdSetLineWidth(commandBuffer,
+ lineWidth);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(
- VkCommandBuffer commandBuffer,
- float depthBiasConstantFactor,
- float depthBiasClamp,
- float depthBiasSlopeFactor)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetDepthBias(VkCommandBuffer commandBuffer,
+ float depthBiasConstantFactor, float depthBiasClamp,
+ float depthBiasSlopeFactor) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETDEPTHBIASSTATE, "vkCmdSetDepthBias()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_SETDEPTHBIASSTATE, "vkCmdSetDepthBias()");
pCB->status |= CBSTATUS_DEPTH_BIAS_SET;
pCB->depthBiasConstantFactor = depthBiasConstantFactor;
pCB->depthBiasClamp = depthBiasClamp;
pCB->depthBiasSlopeFactor = depthBiasSlopeFactor;
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+ dev_data->device_dispatch_table->CmdSetDepthBias(
+ commandBuffer, depthBiasConstantFactor, depthBiasClamp,
+ depthBiasSlopeFactor);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetBlendConstants(VkCommandBuffer commandBuffer,
+ const float blendConstants[4]) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETBLENDSTATE, "vkCmdSetBlendConstants()");
+ skipCall |= addCmd(dev_data, pCB, CMD_SETBLENDSTATE,
+ "vkCmdSetBlendConstants()");
pCB->status |= CBSTATUS_BLEND_SET;
memcpy(pCB->blendConstants, blendConstants, 4 * sizeof(float));
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetBlendConstants(commandBuffer, blendConstants);
+ dev_data->device_dispatch_table->CmdSetBlendConstants(commandBuffer,
+ blendConstants);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(
- VkCommandBuffer commandBuffer,
- float minDepthBounds,
- float maxDepthBounds)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
+ float maxDepthBounds) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETDEPTHBOUNDSSTATE, "vkCmdSetDepthBounds()");
+ skipCall |= addCmd(dev_data, pCB, CMD_SETDEPTHBOUNDSSTATE,
+ "vkCmdSetDepthBounds()");
pCB->status |= CBSTATUS_DEPTH_BOUNDS_SET;
pCB->minDepthBounds = minDepthBounds;
pCB->maxDepthBounds = maxDepthBounds;
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+ dev_data->device_dispatch_table->CmdSetDepthBounds(
+ commandBuffer, minDepthBounds, maxDepthBounds);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t compareMask)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask,
+ uint32_t compareMask) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETSTENCILREADMASKSTATE, "vkCmdSetStencilCompareMask()");
+ skipCall |= addCmd(dev_data, pCB, CMD_SETSTENCILREADMASKSTATE,
+ "vkCmdSetStencilCompareMask()");
if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
pCB->front.compareMask = compareMask;
}
@@ -4520,19 +5800,20 @@
pCB->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+ dev_data->device_dispatch_table->CmdSetStencilCompareMask(
+ commandBuffer, faceMask, compareMask);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t writeMask)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask, uint32_t writeMask) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETSTENCILWRITEMASKSTATE, "vkCmdSetStencilWriteMask()");
+ skipCall |= addCmd(dev_data, pCB, CMD_SETSTENCILWRITEMASKSTATE,
+ "vkCmdSetStencilWriteMask()");
if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
pCB->front.writeMask = writeMask;
}
@@ -4542,19 +5823,20 @@
pCB->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+ dev_data->device_dispatch_table->CmdSetStencilWriteMask(
+ commandBuffer, faceMask, writeMask);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(
- VkCommandBuffer commandBuffer,
- VkStencilFaceFlags faceMask,
- uint32_t reference)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetStencilReference(VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask, uint32_t reference) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_SETSTENCILREFERENCESTATE, "vkCmdSetStencilReference()");
+ skipCall |= addCmd(dev_data, pCB, CMD_SETSTENCILREFERENCESTATE,
+ "vkCmdSetStencilReference()");
if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
pCB->front.reference = reference;
}
@@ -4564,155 +5846,329 @@
pCB->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetStencilReference(commandBuffer, faceMask, reference);
+ dev_data->device_dispatch_table->CmdSetStencilReference(
+ commandBuffer, faceMask, reference);
}
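For context on the dynamic-state entry points above: each one records a CBSTATUS_* bit on the command buffer so draw-time validation can tell which pieces of dynamic state were actually set. A minimal, hedged sketch of application-side usage (assumes <vulkan/vulkan.h>; the command buffer handle is assumed to be in the recording state with a pipeline that declares these states dynamic):

// Sketch only: handles are assumed valid; not taken from this layer.
static void recordDynamicState(VkCommandBuffer cb) {
    vkCmdSetDepthBias(cb, 1.25f /*constantFactor*/, 0.0f /*clamp*/, 1.75f /*slopeFactor*/);
    const float blend[4] = {1.0f, 1.0f, 1.0f, 1.0f};
    vkCmdSetBlendConstants(cb, blend);
    vkCmdSetDepthBounds(cb, 0.0f, 1.0f);
    // FRONT and BACK bits may be combined; the layer updates both faces.
    VkStencilFaceFlags both = VK_STENCIL_FACE_FRONT_BIT | VK_STENCIL_FACE_BACK_BIT;
    vkCmdSetStencilCompareMask(cb, both, 0xFF);
    vkCmdSetStencilWriteMask(cb, both, 0xFF);
    vkCmdSetStencilReference(cb, both, 0x01);
}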
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
+ VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+ const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
+ const uint32_t *pDynamicOffsets) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
if (pCB->state == CB_RECORDING) {
- if ((VK_PIPELINE_BIND_POINT_COMPUTE == pipelineBindPoint) && (pCB->activeRenderPass)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS_CMD, "DS",
- "Incorrectly binding compute DescriptorSets during active RenderPass (%#" PRIxLEAST64 ")", (uint64_t) pCB->activeRenderPass);
+ if ((VK_PIPELINE_BIND_POINT_COMPUTE == pipelineBindPoint) &&
+ (pCB->activeRenderPass)) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS_CMD, "DS",
+ "Incorrectly binding compute DescriptorSets during active "
+ "RenderPass (%#" PRIxLEAST64 ")",
+ (uint64_t)pCB->activeRenderPass);
} else if (VK_PIPELINE_BIND_POINT_GRAPHICS == pipelineBindPoint) {
- skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdBindDescriptorSets");
+ skipCall |=
+ outsideRenderPass(dev_data, pCB, "vkCmdBindDescriptorSets");
}
if (VK_FALSE == skipCall) {
- // Track total count of dynamic descriptor types to make sure we have an offset for each one
+ // Track total count of dynamic descriptor types to make sure we
+ // have an offset for each one
uint32_t totalDynamicDescriptors = 0;
string errorString = "";
- uint32_t lastSetIndex = firstSet+setCount-1;
+ uint32_t lastSetIndex = firstSet + setCount - 1;
if (lastSetIndex >= pCB->boundDescriptorSets.size())
- pCB->boundDescriptorSets.resize(lastSetIndex+1);
- VkDescriptorSet oldFinalBoundSet = pCB->boundDescriptorSets[lastSetIndex];
- for (uint32_t i=0; i<setCount; i++) {
- SET_NODE* pSet = getSetNode(dev_data, pDescriptorSets[i]);
+ pCB->boundDescriptorSets.resize(lastSetIndex + 1);
+ VkDescriptorSet oldFinalBoundSet =
+ pCB->boundDescriptorSets[lastSetIndex];
+ for (uint32_t i = 0; i < setCount; i++) {
+ SET_NODE *pSet = getSetNode(dev_data, pDescriptorSets[i]);
if (pSet) {
loader_platform_thread_lock_mutex(&globalLock);
pCB->uniqueBoundSets.insert(pDescriptorSets[i]);
pSet->boundCmdBuffers.insert(commandBuffer);
pCB->lastBoundDescriptorSet = pDescriptorSets[i];
pCB->lastBoundPipelineLayout = layout;
- pCB->boundDescriptorSets[i+firstSet] = pDescriptorSets[i];
+ pCB->boundDescriptorSets[i + firstSet] =
+ pDescriptorSets[i];
loader_platform_thread_unlock_mutex(&globalLock);
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDescriptorSets[i], __LINE__, DRAWSTATE_NONE, "DS",
- "DS %#" PRIxLEAST64 " bound on pipeline %s", (uint64_t) pDescriptorSets[i], string_VkPipelineBindPoint(pipelineBindPoint));
- if (!pSet->pUpdateStructs && (pSet->descriptorCount != 0)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDescriptorSets[i], __LINE__, DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, "DS",
- "DS %#" PRIxLEAST64 " bound but it was never updated. You may want to either update it or not bind it.", (uint64_t) pDescriptorSets[i]);
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDescriptorSets[i], __LINE__,
+ DRAWSTATE_NONE, "DS",
+ "DS %#" PRIxLEAST64 " bound on pipeline %s",
+ (uint64_t)pDescriptorSets[i],
+ string_VkPipelineBindPoint(pipelineBindPoint));
+ if (!pSet->pUpdateStructs &&
+ (pSet->descriptorCount != 0)) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDescriptorSets[i], __LINE__,
+ DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, "DS",
+ "DS %#" PRIxLEAST64
+ " bound but it was never updated. You may want "
+ "to either update it or not bind it.",
+ (uint64_t)pDescriptorSets[i]);
}
- // Verify that set being bound is compatible with overlapping setLayout of pipelineLayout
- if (!verify_set_layout_compatibility(dev_data, pSet, layout, i+firstSet, errorString)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDescriptorSets[i], __LINE__, DRAWSTATE_PIPELINE_LAYOUTS_INCOMPATIBLE, "DS",
- "descriptorSet #%u being bound is not compatible with overlapping layout in pipelineLayout due to: %s", i, errorString.c_str());
+ // Verify that set being bound is compatible with
+ // overlapping setLayout of pipelineLayout
+ if (!verify_set_layout_compatibility(
+ dev_data, pSet, layout, i + firstSet,
+ errorString)) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDescriptorSets[i], __LINE__,
+ DRAWSTATE_PIPELINE_LAYOUTS_INCOMPATIBLE, "DS",
+ "descriptorSet #%u being bound is not "
+ "compatible with overlapping layout in "
+ "pipelineLayout due to: %s",
+ i, errorString.c_str());
}
if (pSet->pLayout->dynamicDescriptorCount) {
- // First make sure we won't overstep bounds of pDynamicOffsets array
- if ((totalDynamicDescriptors + pSet->pLayout->dynamicDescriptorCount) > dynamicOffsetCount) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDescriptorSets[i], __LINE__, DRAWSTATE_INVALID_DYNAMIC_OFFSET_COUNT, "DS",
- "descriptorSet #%u (%#" PRIxLEAST64 ") requires %u dynamicOffsets, but only %u dynamicOffsets are left in pDynamicOffsets array. There must be one dynamic offset for each dynamic descriptor being bound.",
- i, (uint64_t) pDescriptorSets[i], pSet->pLayout->dynamicDescriptorCount, (dynamicOffsetCount - totalDynamicDescriptors));
- } else { // Validate and store dynamic offsets with the set
+ // First make sure we won't overstep bounds of
+ // pDynamicOffsets array
+ if ((totalDynamicDescriptors +
+ pSet->pLayout->dynamicDescriptorCount) >
+ dynamicOffsetCount) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDescriptorSets[i], __LINE__,
+ DRAWSTATE_INVALID_DYNAMIC_OFFSET_COUNT,
+ "DS", "descriptorSet #%u (%#" PRIxLEAST64
+ ") requires %u dynamicOffsets, but "
+ "only %u dynamicOffsets are left in "
+ "pDynamicOffsets array. There must "
+ "be one dynamic offset for each "
+ "dynamic descriptor being bound.",
+ i, (uint64_t)pDescriptorSets[i],
+ pSet->pLayout->dynamicDescriptorCount,
+ (dynamicOffsetCount -
+ totalDynamicDescriptors));
+ } else { // Validate and store dynamic offsets with
+ // the set
// Validate Dynamic Offset Minimums
- uint32_t cur_dyn_offset = totalDynamicDescriptors;
- for (uint32_t d = 0; d < pSet->descriptorCount; d++) {
- if (pSet->pLayout->descriptorTypes[i] == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
- if (vk_safe_modulo(pDynamicOffsets[cur_dyn_offset], dev_data->physDevPropertyMap[pCB->device].limits.minUniformBufferOffsetAlignment) != 0) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
- __LINE__, DRAWSTATE_INVALID_UNIFORM_BUFFER_OFFSET, "DS",
- "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of device limit minUniformBufferOffsetAlignment %#" PRIxLEAST64,
- cur_dyn_offset, pDynamicOffsets[cur_dyn_offset], dev_data->physDevPropertyMap[pCB->device].limits.minUniformBufferOffsetAlignment);
+ uint32_t cur_dyn_offset =
+ totalDynamicDescriptors;
+ for (uint32_t d = 0; d < pSet->descriptorCount;
+ d++) {
+ if (pSet->pLayout->descriptorTypes[i] ==
+ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
+ if (vk_safe_modulo(
+ pDynamicOffsets[cur_dyn_offset],
+ dev_data
+ ->physDevPropertyMap
+ [pCB->device]
+ .limits
+ .minUniformBufferOffsetAlignment) !=
+ 0) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ 0, __LINE__,
+ DRAWSTATE_INVALID_UNIFORM_BUFFER_OFFSET,
+ "DS", "vkCmdBindDescriptorSets("
+ "): pDynamicOffsets[%d] "
+ "is %d but must be a "
+ "multiple of device "
+ "limit "
+ "minUniformBufferOffsetAl"
+ "ignment %#" PRIxLEAST64,
+ cur_dyn_offset,
+ pDynamicOffsets[cur_dyn_offset],
+ dev_data
+ ->physDevPropertyMap
+ [pCB->device]
+ .limits
+ .minUniformBufferOffsetAlignment);
}
cur_dyn_offset++;
- } else if (pSet->pLayout->descriptorTypes[i] == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
- if (vk_safe_modulo(pDynamicOffsets[cur_dyn_offset], dev_data->physDevPropertyMap[pCB->device].limits.minStorageBufferOffsetAlignment) != 0) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
- __LINE__, DRAWSTATE_INVALID_STORAGE_BUFFER_OFFSET, "DS",
- "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of device limit minStorageBufferOffsetAlignment %#" PRIxLEAST64,
- cur_dyn_offset, pDynamicOffsets[cur_dyn_offset], dev_data->physDevPropertyMap[pCB->device].limits.minStorageBufferOffsetAlignment);
+ } else if (pSet->pLayout
+ ->descriptorTypes[i] ==
+ VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
+ if (vk_safe_modulo(
+ pDynamicOffsets[cur_dyn_offset],
+ dev_data
+ ->physDevPropertyMap
+ [pCB->device]
+ .limits
+ .minStorageBufferOffsetAlignment) !=
+ 0) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ 0, __LINE__,
+ DRAWSTATE_INVALID_STORAGE_BUFFER_OFFSET,
+ "DS", "vkCmdBindDescriptorSets("
+ "): pDynamicOffsets[%d] "
+ "is %d but must be a "
+ "multiple of device "
+ "limit "
+ "minStorageBufferOffsetAl"
+ "ignment %#" PRIxLEAST64,
+ cur_dyn_offset,
+ pDynamicOffsets[cur_dyn_offset],
+ dev_data
+ ->physDevPropertyMap
+ [pCB->device]
+ .limits
+ .minStorageBufferOffsetAlignment);
}
cur_dyn_offset++;
}
}
- // Keep running total of dynamic descriptor count to verify at the end
- totalDynamicDescriptors += pSet->pLayout->dynamicDescriptorCount;
+ // Keep running total of dynamic descriptor
+ // count to verify at the end
+ totalDynamicDescriptors +=
+ pSet->pLayout->dynamicDescriptorCount;
}
}
} else {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pDescriptorSets[i], __LINE__, DRAWSTATE_INVALID_SET, "DS",
- "Attempt to bind DS %#" PRIxLEAST64 " that doesn't exist!", (uint64_t) pDescriptorSets[i]);
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pDescriptorSets[i], __LINE__,
+ DRAWSTATE_INVALID_SET, "DS",
+ "Attempt to bind DS %#" PRIxLEAST64
+ " that doesn't exist!",
+ (uint64_t)pDescriptorSets[i]);
}
}
- skipCall |= addCmd(dev_data, pCB, CMD_BINDDESCRIPTORSETS, "vkCmdBindDescrsiptorSets()");
- // For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
+ skipCall |= addCmd(dev_data, pCB, CMD_BINDDESCRIPTORSETS,
+                           "vkCmdBindDescriptorSets()");
+ // For any previously bound sets, need to set them to "invalid"
+ // if they were disturbed by this update
if (firstSet > 0) { // Check set #s below the first bound set
- for (uint32_t i=0; i<firstSet; ++i) {
- if (pCB->boundDescriptorSets[i] && !verify_set_layout_compatibility(dev_data, dev_data->setMap[pCB->boundDescriptorSets[i]], layout, i, errorString)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) pCB->boundDescriptorSets[i], __LINE__, DRAWSTATE_NONE, "DS",
- "DescriptorSetDS %#" PRIxLEAST64 " previously bound as set #%u was disturbed by newly bound pipelineLayout (%#" PRIxLEAST64 ")", (uint64_t) pCB->boundDescriptorSets[i], i, (uint64_t) layout);
+ for (uint32_t i = 0; i < firstSet; ++i) {
+ if (pCB->boundDescriptorSets[i] &&
+ !verify_set_layout_compatibility(
+ dev_data,
+ dev_data->setMap[pCB->boundDescriptorSets[i]],
+ layout, i, errorString)) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)pCB->boundDescriptorSets[i], __LINE__,
+ DRAWSTATE_NONE, "DS",
+ "DescriptorSetDS %#" PRIxLEAST64
+ " previously bound as set #%u was disturbed by "
+ "newly bound pipelineLayout (%#" PRIxLEAST64
+ ")",
+ (uint64_t)pCB->boundDescriptorSets[i], i,
+ (uint64_t)layout);
pCB->boundDescriptorSets[i] = VK_NULL_HANDLE;
}
}
}
- // Check if newly last bound set invalidates any remaining bound sets
- if ((pCB->boundDescriptorSets.size()-1) > (lastSetIndex)) {
- if (oldFinalBoundSet && !verify_set_layout_compatibility(dev_data, dev_data->setMap[oldFinalBoundSet], layout, lastSetIndex, errorString)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t) oldFinalBoundSet, __LINE__, DRAWSTATE_NONE, "DS",
- "DescriptorSetDS %#" PRIxLEAST64 " previously bound as set #%u is incompatible with set %#" PRIxLEAST64 " newly bound as set #%u so set #%u and any subsequent sets were disturbed by newly bound pipelineLayout (%#" PRIxLEAST64 ")", (uint64_t) oldFinalBoundSet, lastSetIndex, (uint64_t) pCB->boundDescriptorSets[lastSetIndex], lastSetIndex, lastSetIndex+1, (uint64_t) layout);
- pCB->boundDescriptorSets.resize(lastSetIndex+1);
+ // Check if newly last bound set invalidates any remaining bound
+ // sets
+ if ((pCB->boundDescriptorSets.size() - 1) > (lastSetIndex)) {
+ if (oldFinalBoundSet &&
+ !verify_set_layout_compatibility(
+ dev_data, dev_data->setMap[oldFinalBoundSet],
+ layout, lastSetIndex, errorString)) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+ (uint64_t)oldFinalBoundSet, __LINE__,
+ DRAWSTATE_NONE, "DS",
+ "DescriptorSetDS %#" PRIxLEAST64
+ " previously bound as set #%u is incompatible with "
+ "set %#" PRIxLEAST64
+ " newly bound as set #%u so set #%u and any "
+ "subsequent sets were disturbed by newly bound "
+ "pipelineLayout (%#" PRIxLEAST64 ")",
+ (uint64_t)oldFinalBoundSet, lastSetIndex,
+ (uint64_t)pCB->boundDescriptorSets[lastSetIndex],
+ lastSetIndex, lastSetIndex + 1, (uint64_t)layout);
+ pCB->boundDescriptorSets.resize(lastSetIndex + 1);
}
}
- // dynamicOffsetCount must equal the total number of dynamic descriptors in the sets being bound
+ // dynamicOffsetCount must equal the total number of dynamic
+ // descriptors in the sets being bound
if (totalDynamicDescriptors != dynamicOffsetCount) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t) commandBuffer, __LINE__, DRAWSTATE_INVALID_DYNAMIC_OFFSET_COUNT, "DS",
- "Attempting to bind %u descriptorSets with %u dynamic descriptors, but dynamicOffsetCount is %u. It should exactly match the number of dynamic descriptors.", setCount, totalDynamicDescriptors, dynamicOffsetCount);
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_INVALID_DYNAMIC_OFFSET_COUNT, "DS",
+ "Attempting to bind %u descriptorSets with %u dynamic "
+ "descriptors, but dynamicOffsetCount is %u. It should "
+ "exactly match the number of dynamic descriptors.",
+ setCount, totalDynamicDescriptors, dynamicOffsetCount);
}
if (dynamicOffsetCount) {
// Save dynamicOffsets bound to this CB
- pCB->dynamicOffsets.assign(pDynamicOffsets, pDynamicOffsets + dynamicOffsetCount);
+ pCB->dynamicOffsets.assign(
+ pDynamicOffsets, pDynamicOffsets + dynamicOffsetCount);
}
}
} else {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdBindDescriptorSets()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkCmdBindDescriptorSets()");
}
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+ dev_data->device_dispatch_table->CmdBindDescriptorSets(
+ commandBuffer, pipelineBindPoint, layout, firstSet, setCount,
+ pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
}
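As a usage illustration of the rules validated above (exactly one dynamic offset per dynamic descriptor, each offset a multiple of the matching min*BufferOffsetAlignment limit), here is a hedged sketch; the pipeline layout, descriptor set, and device limits are assumed to come from the application:

// Sketch only: 'layout' is assumed to contain a single
// UNIFORM_BUFFER_DYNAMIC descriptor in set 0.
static void bindOneDynamicSet(VkCommandBuffer cb, VkPipelineLayout layout,
                              VkDescriptorSet set,
                              const VkPhysicalDeviceLimits &limits) {
    // dynamicOffsetCount must equal the number of dynamic descriptors (1 here),
    // and the offset must be aligned to minUniformBufferOffsetAlignment.
    uint32_t dynamicOffset =
        (uint32_t)(2 * limits.minUniformBufferOffsetAlignment);
    vkCmdBindDescriptorSets(cb, VK_PIPELINE_BIND_POINT_GRAPHICS, layout,
                            0 /*firstSet*/, 1 /*setCount*/, &set,
                            1 /*dynamicOffsetCount*/, &dynamicOffset);
}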
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, VkIndexType indexType) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_BINDINDEXBUFFER, "vkCmdBindIndexBuffer()");
+ skipCall |= addCmd(dev_data, pCB, CMD_BINDINDEXBUFFER,
+ "vkCmdBindIndexBuffer()");
VkDeviceSize offset_align = 0;
switch (indexType) {
- case VK_INDEX_TYPE_UINT16:
- offset_align = 2;
- break;
- case VK_INDEX_TYPE_UINT32:
- offset_align = 4;
- break;
- default:
- // ParamChecker should catch bad enum, we'll also throw alignment error below if offset_align stays 0
- break;
+ case VK_INDEX_TYPE_UINT16:
+ offset_align = 2;
+ break;
+ case VK_INDEX_TYPE_UINT32:
+ offset_align = 4;
+ break;
+ default:
+ // ParamChecker should catch bad enum, we'll also throw alignment
+ // error below if offset_align stays 0
+ break;
}
if (!offset_align || (offset % offset_align)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_VTX_INDEX_ALIGNMENT_ERROR, "DS",
- "vkCmdBindIndexBuffer() offset (%#" PRIxLEAST64 ") does not fall on alignment (%s) boundary.", offset, string_VkIndexType(indexType));
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VTX_INDEX_ALIGNMENT_ERROR, "DS",
+ "vkCmdBindIndexBuffer() offset (%#" PRIxLEAST64
+ ") does not fall on alignment (%s) boundary.",
+ offset, string_VkIndexType(indexType));
}
pCB->status |= CBSTATUS_INDEX_BUFFER_BOUND;
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+ dev_data->device_dispatch_table->CmdBindIndexBuffer(
+ commandBuffer, buffer, offset, indexType);
}
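The alignment check above reduces to: the bind offset must be a multiple of the index size, 2 bytes for VK_INDEX_TYPE_UINT16 and 4 bytes for VK_INDEX_TYPE_UINT32. A minimal sketch under that assumption (the buffer handle is hypothetical):

// Sketch only: a 256-byte offset satisfies both 2- and 4-byte alignment; an
// offset such as 6 with UINT32 would trigger DRAWSTATE_VTX_INDEX_ALIGNMENT_ERROR.
static void bindIndices(VkCommandBuffer cb, VkBuffer indexBuffer) {
    vkCmdBindIndexBuffer(cb, indexBuffer, 256 /*offset*/, VK_INDEX_TYPE_UINT32);
}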
-void updateResourceTracking(GLOBAL_CB_NODE* pCB, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers) {
+void updateResourceTracking(GLOBAL_CB_NODE *pCB, uint32_t firstBinding,
+ uint32_t bindingCount, const VkBuffer *pBuffers) {
uint32_t end = firstBinding + bindingCount;
if (pCB->currentDrawData.buffers.size() < end) {
pCB->currentDrawData.buffers.resize(end);
@@ -4722,42 +6178,49 @@
}
}
-void updateResourceTrackingOnDraw(GLOBAL_CB_NODE* pCB) {
+void updateResourceTrackingOnDraw(GLOBAL_CB_NODE *pCB) {
pCB->drawData.push_back(pCB->currentDrawData);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer *pBuffers,
- const VkDeviceSize *pOffsets)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
+ uint32_t bindingCount, const VkBuffer *pBuffers,
+ const VkDeviceSize *pOffsets) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
addCmd(dev_data, pCB, CMD_BINDVERTEXBUFFER, "vkCmdBindVertexBuffer()");
updateResourceTracking(pCB, firstBinding, bindingCount, pBuffers);
} else {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdBindVertexBuffer()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkCmdBindVertexBuffer()");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+ dev_data->device_dispatch_table->CmdBindVertexBuffers(
+ commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount,
+ uint32_t instanceCount, uint32_t firstVertex,
+ uint32_t firstInstance) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_DRAW, "vkCmdDraw()");
pCB->drawCount[DRAW]++;
skipCall |= validate_draw_state(dev_data, pCB, VK_FALSE);
// TODO : Need to pass commandBuffer as srcObj here
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "vkCmdDraw() call #%" PRIu64 ", reporting DS state:", g_drawCount[DRAW]++);
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_NONE, "DS",
+ "vkCmdDraw() call #%" PRIu64 ", reporting DS state:",
+ g_drawCount[DRAW]++);
skipCall |= synchAndPrintDSConfig(dev_data, commandBuffer);
if (VK_FALSE == skipCall) {
updateResourceTrackingOnDraw(pCB);
@@ -4765,21 +6228,31 @@
skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdDraw");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+ dev_data->device_dispatch_table->CmdDraw(commandBuffer, vertexCount,
+ instanceCount, firstVertex,
+ firstInstance);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
+ uint32_t instanceCount, uint32_t firstIndex,
+ int32_t vertexOffset, uint32_t firstInstance) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
VkBool32 skipCall = VK_FALSE;
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_DRAWINDEXED, "vkCmdDrawIndexed()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_DRAWINDEXED, "vkCmdDrawIndexed()");
pCB->drawCount[DRAW_INDEXED]++;
skipCall |= validate_draw_state(dev_data, pCB, VK_TRUE);
// TODO : Need to pass commandBuffer as srcObj here
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "vkCmdDrawIndexed() call #%" PRIu64 ", reporting DS state:", g_drawCount[DRAW_INDEXED]++);
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_NONE, "DS",
+ "vkCmdDrawIndexed() call #%" PRIu64 ", reporting DS state:",
+ g_drawCount[DRAW_INDEXED]++);
skipCall |= synchAndPrintDSConfig(dev_data, commandBuffer);
if (VK_FALSE == skipCall) {
updateResourceTrackingOnDraw(pCB);
@@ -4787,21 +6260,30 @@
skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdDrawIndexed");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+ dev_data->device_dispatch_table->CmdDrawIndexed(
+ commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset,
+ firstInstance);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count, uint32_t stride) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
VkBool32 skipCall = VK_FALSE;
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_DRAWINDIRECT, "vkCmdDrawIndirect()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_DRAWINDIRECT, "vkCmdDrawIndirect()");
pCB->drawCount[DRAW_INDIRECT]++;
skipCall |= validate_draw_state(dev_data, pCB, VK_FALSE);
// TODO : Need to pass commandBuffer as srcObj here
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "vkCmdDrawIndirect() call #%" PRIu64 ", reporting DS state:", g_drawCount[DRAW_INDIRECT]++);
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_NONE, "DS", "vkCmdDrawIndirect() call #%" PRIu64
+ ", reporting DS state:",
+ g_drawCount[DRAW_INDIRECT]++);
skipCall |= synchAndPrintDSConfig(dev_data, commandBuffer);
if (VK_FALSE == skipCall) {
updateResourceTrackingOnDraw(pCB);
@@ -4809,36 +6291,49 @@
skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdDrawIndirect");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdDrawIndirect(commandBuffer, buffer, offset, count, stride);
+ dev_data->device_dispatch_table->CmdDrawIndirect(commandBuffer, buffer,
+ offset, count, stride);
}
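The indirect draw path validated above reads VkDrawIndirectCommand records out of a buffer at draw time; the layer only checks command-buffer state, not the buffer contents. A hedged sketch of what one such record looks like on the CPU side before upload (buffer creation and the upload itself are assumed and omitted):

// Sketch only: one indirect record; count = 1 and
// stride = sizeof(VkDrawIndirectCommand) would match it in vkCmdDrawIndirect().
static VkDrawIndirectCommand makeIndirectRecord() {
    VkDrawIndirectCommand cmd = {};
    cmd.vertexCount = 3; // one triangle
    cmd.instanceCount = 1;
    cmd.firstVertex = 0;
    cmd.firstInstance = 0;
    return cmd;
}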
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count,
+ uint32_t stride) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_DRAWINDEXEDINDIRECT, "vkCmdDrawIndexedIndirect()");
+ skipCall |= addCmd(dev_data, pCB, CMD_DRAWINDEXEDINDIRECT,
+ "vkCmdDrawIndexedIndirect()");
pCB->drawCount[DRAW_INDEXED_INDIRECT]++;
skipCall |= validate_draw_state(dev_data, pCB, VK_TRUE);
// TODO : Need to pass commandBuffer as srcObj here
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_NONE, "DS",
- "vkCmdDrawIndexedIndirect() call #%" PRIu64 ", reporting DS state:", g_drawCount[DRAW_INDEXED_INDIRECT]++);
+ skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
+ __LINE__, DRAWSTATE_NONE, "DS",
+ "vkCmdDrawIndexedIndirect() call #%" PRIu64
+ ", reporting DS state:",
+ g_drawCount[DRAW_INDEXED_INDIRECT]++);
skipCall |= synchAndPrintDSConfig(dev_data, commandBuffer);
if (VK_FALSE == skipCall) {
updateResourceTrackingOnDraw(pCB);
}
- skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdDrawIndexedIndirect");
+ skipCall |=
+ outsideRenderPass(dev_data, pCB, "vkCmdDrawIndexedIndirect");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, count, stride);
+ dev_data->device_dispatch_table->CmdDrawIndexedIndirect(
+ commandBuffer, buffer, offset, count, stride);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y, uint32_t z)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y,
+ uint32_t z) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_DISPATCH, "vkCmdDispatch()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdDispatch");
@@ -4847,33 +6342,42 @@
dev_data->device_dispatch_table->CmdDispatch(commandBuffer, x, y, z);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_DISPATCHINDIRECT, "vkCmdDispatchIndirect()");
+ skipCall |= addCmd(dev_data, pCB, CMD_DISPATCHINDIRECT,
+ "vkCmdDispatchIndirect()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdDispatchIndirect");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdDispatchIndirect(commandBuffer, buffer, offset);
+ dev_data->device_dispatch_table->CmdDispatchIndirect(commandBuffer,
+ buffer, offset);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkBuffer dstBuffer, uint32_t regionCount,
+ const VkBufferCopy *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_COPYBUFFER, "vkCmdCopyBuffer()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdCopyBuffer");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+ dev_data->device_dispatch_table->CmdCopyBuffer(
+ commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
}
-VkBool32 VerifySourceImageLayout(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout) {
+VkBool32 VerifySourceImageLayout(VkCommandBuffer cmdBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout) {
VkBool32 skip_call = VK_FALSE;
#ifdef DISABLE_IMAGE_LAYOUT_VALIDATION
@@ -4881,8 +6385,9 @@
return skip_call;
#endif // DISABLE_IMAGE_LAYOUT_VALIDATION
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
auto src_image_element = pCB->imageLayoutMap.find(srcImage);
if (src_image_element == pCB->imageLayoutMap.end()) {
pCB->imageLayoutMap[srcImage].initialLayout = srcImageLayout;
@@ -4890,23 +6395,39 @@
return VK_FALSE;
}
if (src_image_element->second.layout != srcImageLayout) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Cannot copy from an image whose source layout is %d and doesn't match the current layout %d.", srcImageLayout, src_image_element->second.layout);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot copy from an image whose source layout is %d and "
+ "doesn't match the current layout %d.",
+ srcImageLayout, src_image_element->second.layout);
}
if (srcImageLayout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
if (srcImageLayout == VK_IMAGE_LAYOUT_GENERAL) {
- // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning.
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for input image should be TRANSFER_SRC_OPTIMAL instead of GENERAL.");
+ // LAYOUT_GENERAL is allowed, but may not be performance optimal,
+ // flag as perf warning.
+ skip_call |= log_msg(dev_data->report_data,
+ VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for input image should be "
+ "TRANSFER_SRC_OPTIMAL instead of GENERAL.");
} else {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for input image is %d but can only be TRANSFER_SRC_OPTIMAL or GENERAL.", srcImageLayout);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for input image is %d but can only be "
+ "TRANSFER_SRC_OPTIMAL or GENERAL.",
+ srcImageLayout);
}
}
return skip_call;
}
-VkBool32 VerifyDestImageLayout(VkCommandBuffer cmdBuffer, VkImage destImage, VkImageLayout destImageLayout) {
+VkBool32 VerifyDestImageLayout(VkCommandBuffer cmdBuffer, VkImage destImage,
+ VkImageLayout destImageLayout) {
VkBool32 skip_call = VK_FALSE;
#ifdef DISABLE_IMAGE_LAYOUT_VALIDATION
@@ -4914,8 +6435,9 @@
return skip_call;
#endif // DISABLE_IMAGE_LAYOUT_VALIDATION
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
auto dest_image_element = pCB->imageLayoutMap.find(destImage);
if (dest_image_element == pCB->imageLayoutMap.end()) {
pCB->imageLayoutMap[destImage].initialLayout = destImageLayout;
@@ -4923,280 +6445,375 @@
return VK_FALSE;
}
if (dest_image_element->second.layout != destImageLayout) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Cannot copy from an image whose dest layout is %d and doesn't match the current layout %d.", destImageLayout, dest_image_element->second.layout);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot copy from an image whose dest layout is %d and "
+ "doesn't match the current layout %d.",
+ destImageLayout, dest_image_element->second.layout);
}
if (destImageLayout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
if (destImageLayout == VK_IMAGE_LAYOUT_GENERAL) {
- // LAYOUT_GENERAL is allowed, but may not be performance optimal, flag as perf warning.
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERF_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for output image should be TRANSFER_DST_OPTIMAL instead of GENERAL.");
+ // LAYOUT_GENERAL is allowed, but may not be performance optimal,
+ // flag as perf warning.
+ skip_call |= log_msg(dev_data->report_data,
+ VK_DEBUG_REPORT_PERF_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for output image should be "
+ "TRANSFER_DST_OPTIMAL instead of GENERAL.");
} else {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for output image is %d but can only be TRANSFER_DST_OPTIMAL or GENERAL.", destImageLayout);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for output image is %d but can only be "
+ "TRANSFER_DST_OPTIMAL or GENERAL.",
+ destImageLayout);
}
}
return skip_call;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount, const VkImageCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageCopy *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_COPYIMAGE, "vkCmdCopyImage()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdCopyImage");
- skipCall |= VerifySourceImageLayout(commandBuffer, srcImage, srcImageLayout);
- skipCall |= VerifyDestImageLayout(commandBuffer, dstImage, dstImageLayout);
+ skipCall |=
+ VerifySourceImageLayout(commandBuffer, srcImage, srcImageLayout);
+ skipCall |=
+ VerifyDestImageLayout(commandBuffer, dstImage, dstImageLayout);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ dev_data->device_dispatch_table->CmdCopyImage(
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions);
}
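VerifySourceImageLayout/VerifyDestImageLayout above expect the source image in TRANSFER_SRC_OPTIMAL and the destination in TRANSFER_DST_OPTIMAL (GENERAL is accepted with a perf warning). A hedged sketch of a barrier that puts the destination into the expected layout before the copy; the access masks and pipeline stages are reasonable assumptions rather than values taken from this layer, and the copy region is hypothetical:

// Sketch only: transition dstImage, then copy; srcImage is assumed to have
// been transitioned to TRANSFER_SRC_OPTIMAL the same way.
static void copyWithExpectedLayouts(VkCommandBuffer cb, VkImage srcImage,
                                    VkImage dstImage, const VkImageCopy &region) {
    VkImageMemoryBarrier toDst = {};
    toDst.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    toDst.srcAccessMask = 0;
    toDst.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    toDst.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    toDst.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    toDst.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    toDst.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    toDst.image = dstImage;
    toDst.subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    vkCmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                         VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL, 0, NULL, 1,
                         &toDst);
    vkCmdCopyImage(cb, srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage,
                   VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
}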
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer,
- VkImage srcImage, VkImageLayout srcImageLayout,
- VkImage dstImage, VkImageLayout dstImageLayout,
- uint32_t regionCount, const VkImageBlit* pRegions,
- VkFilter filter)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageBlit *pRegions, VkFilter filter) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_BLITIMAGE, "vkCmdBlitImage()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdBlitImage");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+ dev_data->device_dispatch_table->CmdBlitImage(
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions, filter);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage, VkImageLayout dstImageLayout,
- uint32_t regionCount, const VkBufferImageCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkImage dstImage, VkImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_COPYBUFFERTOIMAGE, "vkCmdCopyBufferToImage()");
+ skipCall |= addCmd(dev_data, pCB, CMD_COPYBUFFERTOIMAGE,
+ "vkCmdCopyBufferToImage()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdCopyBufferToImage");
- skipCall |= VerifyDestImageLayout(commandBuffer, dstImage, dstImageLayout);
+ skipCall |=
+ VerifyDestImageLayout(commandBuffer, dstImage, dstImageLayout);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+ dev_data->device_dispatch_table->CmdCopyBufferToImage(
+ commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount,
+ pRegions);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
- VkImage srcImage, VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount, const VkBufferImageCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_COPYIMAGETOBUFFER, "vkCmdCopyImageToBuffer()");
+ skipCall |= addCmd(dev_data, pCB, CMD_COPYIMAGETOBUFFER,
+ "vkCmdCopyImageToBuffer()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdCopyImageToBuffer");
- skipCall |= VerifySourceImageLayout(commandBuffer, srcImage, srcImageLayout);
+ skipCall |=
+ VerifySourceImageLayout(commandBuffer, srcImage, srcImageLayout);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+ dev_data->device_dispatch_table->CmdCopyImageToBuffer(
+ commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount,
+ pRegions);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const uint32_t* pData)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize dataSize,
+ const uint32_t *pData) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_UPDATEBUFFER, "vkCmdUpdateBuffer()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_UPDATEBUFFER, "vkCmdUpdateBuffer()");
        skipCall |= insideRenderPass(dev_data, pCB, "vkCmdUpdateBuffer");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+ dev_data->device_dispatch_table->CmdUpdateBuffer(
+ commandBuffer, dstBuffer, dstOffset, dataSize, pData);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_FILLBUFFER, "vkCmdFillBuffer()");
        skipCall |= insideRenderPass(dev_data, pCB, "vkCmdFillBuffer");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+ dev_data->device_dispatch_table->CmdFillBuffer(commandBuffer, dstBuffer,
+ dstOffset, size, data);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearAttachments(VkCommandBuffer commandBuffer,
+ uint32_t attachmentCount,
+ const VkClearAttachment *pAttachments,
+ uint32_t rectCount, const VkClearRect *pRects) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_CLEARATTACHMENTS, "vkCmdClearAttachments()");
- // Warn if this is issued prior to Draw Cmd and clearing the entire attachment
- if (!hasDrawCmd(pCB) &&
- (pCB->activeRenderPassBeginInfo.renderArea.extent.width == pRects[0].rect.extent.width) &&
- (pCB->activeRenderPassBeginInfo.renderArea.extent.height == pRects[0].rect.extent.height)) {
+ skipCall |= addCmd(dev_data, pCB, CMD_CLEARATTACHMENTS,
+ "vkCmdClearAttachments()");
+ // Warn if this is issued prior to Draw Cmd and clearing the entire
+ // attachment
+ if (!hasDrawCmd(pCB) &&
+ (pCB->activeRenderPassBeginInfo.renderArea.extent.width ==
+ pRects[0].rect.extent.width) &&
+ (pCB->activeRenderPassBeginInfo.renderArea.extent.height ==
+ pRects[0].rect.extent.height)) {
// TODO : commandBuffer should be srcObj
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_CLEAR_CMD_BEFORE_DRAW, "DS",
- "vkCmdClearAttachments() issued on CB object 0x%" PRIxLEAST64 " prior to any Draw Cmds."
- " It is recommended you use RenderPass LOAD_OP_CLEAR on Attachments prior to any Draw.", (uint64_t)(commandBuffer));
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_CLEAR_CMD_BEFORE_DRAW, "DS",
+ "vkCmdClearAttachments() issued on CB object 0x%" PRIxLEAST64
+ " prior to any Draw Cmds."
+ " It is recommended you use RenderPass LOAD_OP_CLEAR on "
+ "Attachments prior to any Draw.",
+ (uint64_t)(commandBuffer));
}
skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdClearAttachments");
}
// Validate that attachment is in reference list of active subpass
if (pCB->activeRenderPass) {
- const VkRenderPassCreateInfo *pRPCI = dev_data->renderPassMap[pCB->activeRenderPass]->pCreateInfo;
- const VkSubpassDescription *pSD = &pRPCI->pSubpasses[pCB->activeSubpass];
+ const VkRenderPassCreateInfo *pRPCI =
+ dev_data->renderPassMap[pCB->activeRenderPass]->pCreateInfo;
+ const VkSubpassDescription *pSD =
+ &pRPCI->pSubpasses[pCB->activeSubpass];
- for (uint32_t attachment_idx = 0; attachment_idx < attachmentCount; attachment_idx++) {
+ for (uint32_t attachment_idx = 0; attachment_idx < attachmentCount;
+ attachment_idx++) {
const VkClearAttachment *attachment = &pAttachments[attachment_idx];
if (attachment->aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
VkBool32 found = VK_FALSE;
for (uint32_t i = 0; i < pSD->colorAttachmentCount; i++) {
- if (attachment->colorAttachment == pSD->pColorAttachments[i].attachment) {
+ if (attachment->colorAttachment ==
+ pSD->pColorAttachments[i].attachment) {
found = VK_TRUE;
break;
}
}
if (VK_FALSE == found) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS",
- "vkCmdClearAttachments() attachment index %d not found in attachment reference array of active subpass %d",
- attachment->colorAttachment, pCB->activeSubpass);
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS",
+ "vkCmdClearAttachments() attachment index %d not found "
+ "in attachment reference array of active subpass %d",
+ attachment->colorAttachment, pCB->activeSubpass);
}
- } else if (attachment->aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
- if (!pSD->pDepthStencilAttachment || // Says no DS will be used in active subpass
- (pSD->pDepthStencilAttachment->attachment == VK_ATTACHMENT_UNUSED)) { // Says no DS will be used in active subpass
+ } else if (attachment->aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT)) {
+ if (!pSD->pDepthStencilAttachment || // Says no DS will be used
+ // in active subpass
+ (pSD->pDepthStencilAttachment->attachment ==
+ VK_ATTACHMENT_UNUSED)) { // Says no DS will be used in
+ // active subpass
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS",
- "vkCmdClearAttachments() attachment index %d does not match depthStencilAttachment.attachment (%d) found in active subpass %d",
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, "DS",
+ "vkCmdClearAttachments() attachment index %d does not "
+ "match depthStencilAttachment.attachment (%d) found in "
+ "active subpass %d",
attachment->colorAttachment,
- (pSD->pDepthStencilAttachment) ? pSD->pDepthStencilAttachment->attachment : VK_ATTACHMENT_UNUSED,
+ (pSD->pDepthStencilAttachment)
+ ? pSD->pDepthStencilAttachment->attachment
+ : VK_ATTACHMENT_UNUSED,
pCB->activeSubpass);
}
}
}
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+ dev_data->device_dispatch_table->CmdClearAttachments(
+ commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}
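The check above requires a cleared color attachment index to appear in the active subpass's pColorAttachments list (and a depth/stencil clear to have a real depthStencilAttachment). A hedged usage sketch; the render pass and subpass configuration are assumed:

// Sketch only: clears color attachment 0 of the active subpass over a small
// rectangle; issuing this after at least one draw avoids the
// DRAWSTATE_CLEAR_CMD_BEFORE_DRAW warning.
static void clearColorAttachment0(VkCommandBuffer cb) {
    VkClearAttachment clear = {};
    clear.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    clear.colorAttachment = 0; // must be referenced by the active subpass
    clear.clearValue.color.float32[0] = 0.0f;
    clear.clearValue.color.float32[1] = 0.0f;
    clear.clearValue.color.float32[2] = 0.0f;
    clear.clearValue.color.float32[3] = 1.0f;
    VkClearRect rect = {};
    rect.rect.offset = {0, 0};
    rect.rect.extent = {64, 64};
    rect.baseArrayLayer = 0;
    rect.layerCount = 1;
    vkCmdClearAttachments(cb, 1, &clear, 1, &rect);
}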
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image, VkImageLayout imageLayout,
- const VkClearColorValue *pColor,
- uint32_t rangeCount, const VkImageSubresourceRange* pRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearColorValue *pColor, uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_CLEARCOLORIMAGE, "vkCmdClearColorImage()");
+ skipCall |= addCmd(dev_data, pCB, CMD_CLEARCOLORIMAGE,
+ "vkCmdClearColorImage()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdClearColorImage");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+ dev_data->device_dispatch_table->CmdClearColorImage(
+ commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image, VkImageLayout imageLayout,
- const VkClearDepthStencilValue *pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearDepthStencilValue *pDepthStencil,
+ uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_CLEARDEPTHSTENCILIMAGE, "vkCmdClearDepthStencilImage()");
- skipCall |= insideRenderPass(dev_data, pCB, "vkCmdClearDepthStencilImage");
+ skipCall |= addCmd(dev_data, pCB, CMD_CLEARDEPTHSTENCILIMAGE,
+ "vkCmdClearDepthStencilImage()");
+ skipCall |=
+ insideRenderPass(dev_data, pCB, "vkCmdClearDepthStencilImage");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+ dev_data->device_dispatch_table->CmdClearDepthStencilImage(
+ commandBuffer, image, imageLayout, pDepthStencil, rangeCount,
+ pRanges);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer,
- VkImage srcImage, VkImageLayout srcImageLayout,
- VkImage dstImage, VkImageLayout dstImageLayout,
- uint32_t regionCount, const VkImageResolve* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageResolve *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_RESOLVEIMAGE, "vkCmdResolveImage()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_RESOLVEIMAGE, "vkCmdResolveImage()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdResolveImage");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ dev_data->device_dispatch_table->CmdResolveImage(
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+ VkPipelineStageFlags stageMask) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_SETEVENT, "vkCmdSetEvent()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdSetEvent");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdSetEvent(commandBuffer, event, stageMask);
+ dev_data->device_dispatch_table->CmdSetEvent(commandBuffer, event,
+ stageMask);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+ VkPipelineStageFlags stageMask) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_RESETEVENT, "vkCmdResetEvent()");
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdResetEvent");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdResetEvent(commandBuffer, event, stageMask);
+ dev_data->device_dispatch_table->CmdResetEvent(commandBuffer, event,
+ stageMask);
}
-VkBool32 TransitionImageLayouts(VkCommandBuffer cmdBuffer, uint32_t memBarrierCount, const VkImageMemoryBarrier* pImgMemBarriers) {
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
+VkBool32 TransitionImageLayouts(VkCommandBuffer cmdBuffer,
+ uint32_t memBarrierCount,
+ const VkImageMemoryBarrier *pImgMemBarriers) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
VkBool32 skip = VK_FALSE;
#ifdef DISABLE_IMAGE_LAYOUT_VALIDATION
- // TODO: Fix -- pay attention to image subresource ranges -- not all subresources transition at the same time
+ // TODO: Fix -- pay attention to image subresource ranges -- not all
+ // subresources transition at the same time
return skip;
#endif // DISABLE_IMAGE_LAYOUT_VALIDATION
for (uint32_t i = 0; i < memBarrierCount; ++i) {
auto mem_barrier = &pImgMemBarriers[i];
- if (mem_barrier && mem_barrier->sType == VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER) {
+ if (mem_barrier &&
+ mem_barrier->sType == VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER) {
auto image_data = pCB->imageLayoutMap.find(mem_barrier->image);
if (image_data == pCB->imageLayoutMap.end()) {
- pCB->imageLayoutMap[mem_barrier->image].initialLayout = mem_barrier->oldLayout;
- pCB->imageLayoutMap[mem_barrier->image].layout = mem_barrier->newLayout;
+ pCB->imageLayoutMap[mem_barrier->image].initialLayout =
+ mem_barrier->oldLayout;
+ pCB->imageLayoutMap[mem_barrier->image].layout =
+ mem_barrier->newLayout;
} else {
if (image_data->second.layout != mem_barrier->oldLayout) {
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "You cannot transition the layout from %d when current layout is %d.", mem_barrier->oldLayout, image_data->second.layout);
+ skip |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "You cannot transition the layout from %d when current "
+ "layout is %d.",
+ mem_barrier->oldLayout, image_data->second.layout);
}
image_data->second.layout = mem_barrier->newLayout;
}
@@ -5206,8 +6823,7 @@
}
// Print readable FlagBits in FlagMask
-std::string string_VkAccessFlags(VkAccessFlags accessMask)
-{
+std::string string_VkAccessFlags(VkAccessFlags accessMask) {
std::string result;
std::string separator;
@@ -5217,7 +6833,8 @@
result = "[";
for (auto i = 0; i < 32; i++) {
if (accessMask & (1 << i)) {
- result = result + separator + string_VkAccessFlagBits((VkAccessFlagBits)(1 << i));
+ result = result + separator +
+ string_VkAccessFlagBits((VkAccessFlagBits)(1 << i));
separator = " | ";
}
}
@@ -5226,115 +6843,181 @@
return result;
}
-// AccessFlags MUST have 'required_bit' set, and may have one or more of 'optional_bits' set.
-// If required_bit is zero, accessMask must have at least one of 'optional_bits' set
-// TODO: Add tracking to ensure that at least one barrier has been set for these layout transitions
-VkBool32 ValidateMaskBits(const layer_data* my_data, VkCommandBuffer cmdBuffer, const VkAccessFlags& accessMask, const VkImageLayout& layout,
- VkAccessFlags required_bit, VkAccessFlags optional_bits, const char* type) {
+// AccessFlags MUST have 'required_bit' set, and may have one or more of
+// 'optional_bits' set.
+// If required_bit is zero, accessMask must have at least one of 'optional_bits'
+// set
+// TODO: Add tracking to ensure that at least one barrier has been set for these
+// layout transitions
+VkBool32 ValidateMaskBits(const layer_data *my_data, VkCommandBuffer cmdBuffer,
+ const VkAccessFlags &accessMask,
+ const VkImageLayout &layout,
+ VkAccessFlags required_bit,
+ VkAccessFlags optional_bits, const char *type) {
VkBool32 skip_call = VK_FALSE;
- if ((accessMask & required_bit) || (!required_bit && (accessMask & optional_bits))) {
+ if ((accessMask & required_bit) ||
+ (!required_bit && (accessMask & optional_bits))) {
if (accessMask & !(required_bit | optional_bits)) {
// TODO: Verify against Valid Use
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
- "Additional bits in %s accessMask %d %s are specified when layout is %s.",
- type, accessMask, string_VkAccessFlags(accessMask).c_str(), string_VkImageLayout(layout));
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_BARRIER, "DS", "Additional bits in %s "
+ "accessMask %d %s are "
+ "specified when layout is %s.",
+ type, accessMask, string_VkAccessFlags(accessMask).c_str(),
+ string_VkImageLayout(layout));
}
} else {
if (!required_bit) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
- "%s AccessMask %d %s must contain at least one of access bits %d %s when layout is %s, unless the app has previously added a barrier for this transition.",
- type, accessMask, string_VkAccessFlags(accessMask).c_str(), optional_bits,
- string_VkAccessFlags(optional_bits).c_str(), string_VkImageLayout(layout));
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s AccessMask %d %s must contain at least one of access bits "
+ "%d %s when layout is %s, unless the app has previously added "
+ "a barrier for this transition.",
+ type, accessMask, string_VkAccessFlags(accessMask).c_str(),
+ optional_bits, string_VkAccessFlags(optional_bits).c_str(),
+ string_VkImageLayout(layout));
} else {
std::string opt_bits;
if (optional_bits != 0) {
std::stringstream ss;
ss << optional_bits;
- opt_bits = "and may have optional bits " + ss.str() + ' ' + string_VkAccessFlags(optional_bits);
+ opt_bits = "and may have optional bits " + ss.str() + ' ' +
+ string_VkAccessFlags(optional_bits);
}
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
- "%s AccessMask %d %s must have required access bit %d %s %s when layout is %s, unless the app has previously added a barrier for this transition.",
- type, accessMask, string_VkAccessFlags(accessMask).c_str(),
- required_bit, string_VkAccessFlags(required_bit).c_str(),
- opt_bits.c_str(), string_VkImageLayout(layout));
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s AccessMask %d %s must have required access bit %d %s %s "
+ "when layout is %s, unless the app has previously added a "
+ "barrier for this transition.",
+ type, accessMask, string_VkAccessFlags(accessMask).c_str(),
+ required_bit, string_VkAccessFlags(required_bit).c_str(),
+ opt_bits.c_str(), string_VkImageLayout(layout));
}
}
return skip_call;
}
-VkBool32 ValidateMaskBitsFromLayouts(const layer_data* my_data, VkCommandBuffer cmdBuffer, const VkAccessFlags& accessMask, const VkImageLayout& layout, const char* type) {
+VkBool32 ValidateMaskBitsFromLayouts(const layer_data *my_data,
+ VkCommandBuffer cmdBuffer,
+ const VkAccessFlags &accessMask,
+ const VkImageLayout &layout,
+ const char *type) {
VkBool32 skip_call = VK_FALSE;
switch (layout) {
- case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, type);
- break;
+ case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: {
+ skip_call |=
+ ValidateMaskBits(my_data, cmdBuffer, accessMask, layout,
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: {
+ skip_call |=
+ ValidateMaskBits(my_data, cmdBuffer, accessMask, layout,
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout,
+ VK_ACCESS_TRANSFER_WRITE_BIT, 0, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_PREINITIALIZED: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout,
+ VK_ACCESS_HOST_WRITE_BIT, 0, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: {
+ skip_call |=
+ ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, 0,
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
+ VK_ACCESS_SHADER_READ_BIT,
+ type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, 0,
+ VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
+ VK_ACCESS_SHADER_READ_BIT,
+ type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout,
+ VK_ACCESS_TRANSFER_READ_BIT, 0, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_UNDEFINED: {
+ if (accessMask != 0) {
+ // TODO: Verify against Valid Use section spec
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_BARRIER, "DS", "Additional bits in %s "
+ "accessMask %d %s are "
+ "specified when layout is %s.",
+ type, accessMask, string_VkAccessFlags(accessMask).c_str(),
+ string_VkImageLayout(layout));
}
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, type);
- break;
- }
- case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_WRITE_BIT, 0, type);
- break;
- }
- case VK_IMAGE_LAYOUT_PREINITIALIZED: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_HOST_WRITE_BIT, 0, type);
- break;
- }
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, 0, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT, type);
- break;
- }
- case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, 0, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT, type);
- break;
- }
- case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_READ_BIT, 0, type);
- break;
- }
- case VK_IMAGE_LAYOUT_UNDEFINED: {
- if (accessMask != 0) {
- // TODO: Verify against Valid Use section spec
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
- "Additional bits in %s accessMask %d %s are specified when layout is %s.", type, accessMask, string_VkAccessFlags(accessMask).c_str(),
- string_VkImageLayout(layout));
- }
- break;
- }
- case VK_IMAGE_LAYOUT_GENERAL:
- default: {
- break;
- }
+ break;
+ }
+ case VK_IMAGE_LAYOUT_GENERAL:
+ default: { break; }
}
return skip_call;
}
-VkBool32 ValidateBarriers(VkCommandBuffer cmdBuffer, uint32_t memBarrierCount, const VkMemoryBarrier* pMemBarriers, uint32_t imageMemBarrierCount, const VkImageMemoryBarrier *pImageMemBarriers)
-{
+VkBool32 ValidateBarriers(VkCommandBuffer cmdBuffer, uint32_t memBarrierCount,
+ const VkMemoryBarrier *pMemBarriers,
+ uint32_t imageMemBarrierCount,
+ const VkImageMemoryBarrier *pImageMemBarriers) {
VkBool32 skip_call = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
if (pCB->activeRenderPass && memBarrierCount) {
for (uint32_t i = 0; i < memBarrierCount; ++i) {
auto mem_barrier = &pMemBarriers[i];
- if (mem_barrier && mem_barrier->sType != VK_STRUCTURE_TYPE_MEMORY_BARRIER) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
- "Image or Buffers Barriers cannot be used during a render pass.");
+ if (mem_barrier &&
+ mem_barrier->sType != VK_STRUCTURE_TYPE_MEMORY_BARRIER) {
+ skip_call |= log_msg(dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_BARRIER, "DS",
+                                     "Image or buffer barriers cannot be used "
+ "during a render pass.");
}
}
- if (!dev_data->renderPassMap[pCB->activeRenderPass]->hasSelfDependency[pCB->activeSubpass]) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
- "Barriers cannot be set during subpass %d with no self dependency specified.", pCB->activeSubpass);
+ if (!dev_data->renderPassMap[pCB->activeRenderPass]
+ ->hasSelfDependency[pCB->activeSubpass]) {
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_BARRIER, "DS",
+ "Barriers cannot be set during subpass %d with no self "
+ "dependency specified.",
+ pCB->activeSubpass);
}
}
for (uint32_t i = 0; i < imageMemBarrierCount; ++i) {
auto mem_barrier = &pImageMemBarriers[i];
- if (mem_barrier && mem_barrier->sType == VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER) {
- skip_call |= ValidateMaskBitsFromLayouts(dev_data, cmdBuffer, mem_barrier->srcAccessMask, mem_barrier->oldLayout, "Source");
- skip_call |= ValidateMaskBitsFromLayouts(dev_data, cmdBuffer, mem_barrier->dstAccessMask, mem_barrier->newLayout, "Dest");
+ if (mem_barrier &&
+ mem_barrier->sType == VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER) {
+ skip_call |= ValidateMaskBitsFromLayouts(
+ dev_data, cmdBuffer, mem_barrier->srcAccessMask,
+ mem_barrier->oldLayout, "Source");
+ skip_call |= ValidateMaskBitsFromLayouts(
+ dev_data, cmdBuffer, mem_barrier->dstAccessMask,
+ mem_barrier->newLayout, "Dest");
}
}
@@ -5342,91 +7025,115 @@
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(
- VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents,
- VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers)
-{
+ VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents,
+ VkPipelineStageFlags sourceStageMask, VkPipelineStageFlags dstStageMask,
+ uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier *pImageMemoryBarriers) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
for (uint32_t i = 0; i < eventCount; ++i) {
pCB->waitedEvents.push_back(pEvents[i]);
}
if (pCB->state == CB_RECORDING) {
- skipCall |= addCmd(dev_data, pCB, CMD_WAITEVENTS, "vkCmdWaitEvents()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_WAITEVENTS, "vkCmdWaitEvents()");
} else {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdWaitEvents()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkCmdWaitEvents()");
}
- skipCall |= TransitionImageLayouts(commandBuffer, imageMemoryBarrierCount, pImageMemoryBarriers);
- skipCall |= ValidateBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ skipCall |= TransitionImageLayouts(
+ commandBuffer, imageMemoryBarrierCount, pImageMemoryBarriers);
+ skipCall |=
+ ValidateBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers,
+ imageMemoryBarrierCount, pImageMemoryBarriers);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdWaitEvents(commandBuffer, eventCount, pEvents, sourceStageMask, dstStageMask,
- memoryBarrierCount, pMemoryBarriers,
- bufferMemoryBarrierCount, pBufferMemoryBarriers,
- imageMemoryBarrierCount, pImageMemoryBarriers);
+ dev_data->device_dispatch_table->CmdWaitEvents(
+ commandBuffer, eventCount, pEvents, sourceStageMask, dstStageMask,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
- VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers)
-{
+ VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier *pImageMemoryBarriers) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_PIPELINEBARRIER, "vkCmdPipelineBarrier()");
- skipCall |= TransitionImageLayouts(commandBuffer, imageMemoryBarrierCount, pImageMemoryBarriers);
- skipCall |= ValidateBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ skipCall |= addCmd(dev_data, pCB, CMD_PIPELINEBARRIER,
+ "vkCmdPipelineBarrier()");
+ skipCall |= TransitionImageLayouts(
+ commandBuffer, imageMemoryBarrierCount, pImageMemoryBarriers);
+ skipCall |=
+ ValidateBarriers(commandBuffer, memoryBarrierCount, pMemoryBarriers,
+ imageMemoryBarrierCount, pImageMemoryBarriers);
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
- memoryBarrierCount, pMemoryBarriers,
- bufferMemoryBarrierCount, pBufferMemoryBarriers,
- imageMemoryBarrierCount, pImageMemoryBarriers);
+ dev_data->device_dispatch_table->CmdPipelineBarrier(
+ commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot, VkFlags flags)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot, VkFlags flags) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
skipCall |= addCmd(dev_data, pCB, CMD_BEGINQUERY, "vkCmdBeginQuery()");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
+ dev_data->device_dispatch_table->CmdBeginQuery(commandBuffer, queryPool,
+ slot, flags);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
QueryObject query = {queryPool, slot};
pCB->queryToStateMap[query] = 1;
if (pCB->state == CB_RECORDING) {
skipCall |= addCmd(dev_data, pCB, CMD_ENDQUERY, "VkCmdEndQuery()");
} else {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdEndQuery()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkCmdEndQuery()");
}
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdEndQuery(commandBuffer, queryPool, slot);
+ dev_data->device_dispatch_table->CmdEndQuery(commandBuffer, queryPool,
+ slot);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t firstQuery, uint32_t queryCount) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
for (uint32_t i = 0; i < queryCount; i++) {
QueryObject query = {queryPool, firstQuery + i};
@@ -5434,71 +7141,100 @@
pCB->queryToStateMap[query] = 0;
}
if (pCB->state == CB_RECORDING) {
- skipCall |= addCmd(dev_data, pCB, CMD_RESETQUERYPOOL, "VkCmdResetQueryPool()");
+ skipCall |= addCmd(dev_data, pCB, CMD_RESETQUERYPOOL,
+ "VkCmdResetQueryPool()");
} else {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdResetQueryPool()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkCmdResetQueryPool()");
}
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdQueryPool");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+ dev_data->device_dispatch_table->CmdResetQueryPool(
+ commandBuffer, queryPool, firstQuery, queryCount);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
- uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset,
- VkDeviceSize stride, VkQueryResultFlags flags)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
+ VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize stride,
+ VkQueryResultFlags flags) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
for (uint32_t i = 0; i < queryCount; i++) {
QueryObject query = {queryPool, firstQuery + i};
- if(!pCB->queryToStateMap[query]) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_QUERY, "DS",
- "Requesting a copy from query to buffer with invalid query: queryPool %" PRIu64 ", index %d", (uint64_t)(queryPool), firstQuery + i);
+ if (!pCB->queryToStateMap[query]) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_QUERY, "DS",
+ "Requesting a copy from query to buffer with invalid "
+ "query: queryPool %" PRIu64 ", index %d",
+ (uint64_t)(queryPool), firstQuery + i);
}
}
if (pCB->state == CB_RECORDING) {
- skipCall |= addCmd(dev_data, pCB, CMD_COPYQUERYPOOLRESULTS, "vkCmdCopyQueryPoolResults()");
+ skipCall |= addCmd(dev_data, pCB, CMD_COPYQUERYPOOLRESULTS,
+ "vkCmdCopyQueryPoolResults()");
} else {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdCopyQueryPoolResults()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkCmdCopyQueryPoolResults()");
}
- skipCall |= insideRenderPass(dev_data, pCB, "vkCmdCopyQueryPoolResults");
+ skipCall |=
+ insideRenderPass(dev_data, pCB, "vkCmdCopyQueryPoolResults");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdCopyQueryPoolResults(commandBuffer, queryPool,
- firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+ dev_data->device_dispatch_table->CmdCopyQueryPoolResults(
+ commandBuffer, queryPool, firstQuery, queryCount, dstBuffer,
+ dstOffset, stride, flags);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdWriteTimestamp(VkCommandBuffer commandBuffer,
+ VkPipelineStageFlagBits pipelineStage,
+ VkQueryPool queryPool, uint32_t slot) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
QueryObject query = {queryPool, slot};
pCB->queryToStateMap[query] = 1;
if (pCB->state == CB_RECORDING) {
- skipCall |= addCmd(dev_data, pCB, CMD_WRITETIMESTAMP, "vkCmdWriteTimestamp()");
+ skipCall |= addCmd(dev_data, pCB, CMD_WRITETIMESTAMP,
+ "vkCmdWriteTimestamp()");
} else {
- skipCall |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdWriteTimestamp()");
+ skipCall |= report_error_no_cb_begin(dev_data, commandBuffer,
+ "vkCmdWriteTimestamp()");
}
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
+ dev_data->device_dispatch_table->CmdWriteTimestamp(
+ commandBuffer, pipelineStage, queryPool, slot);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateFramebuffer(VkDevice device,
+ const VkFramebufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkFramebuffer *pFramebuffer) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateFramebuffer(
+ device, pCreateInfo, pAllocator, pFramebuffer);
if (VK_SUCCESS == result) {
// Shadow create info and store in map
- VkFramebufferCreateInfo* localFBCI = new VkFramebufferCreateInfo(*pCreateInfo);
+ VkFramebufferCreateInfo *localFBCI =
+ new VkFramebufferCreateInfo(*pCreateInfo);
if (pCreateInfo->pAttachments) {
- localFBCI->pAttachments = new VkImageView[localFBCI->attachmentCount];
- memcpy((void*)localFBCI->pAttachments, pCreateInfo->pAttachments, localFBCI->attachmentCount*sizeof(VkImageView));
+ localFBCI->pAttachments =
+ new VkImageView[localFBCI->attachmentCount];
+ memcpy((void *)localFBCI->pAttachments, pCreateInfo->pAttachments,
+ localFBCI->attachmentCount * sizeof(VkImageView));
}
dev_data->frameBufferMap[*pFramebuffer] = localFBCI;
}
@@ -5512,16 +7248,22 @@
std::vector<uint32_t> next;
};
-VkBool32 FindDependency(const int index, const int dependent, const std::vector<DAGNode>& subpass_to_node, std::unordered_set<uint32_t>& processed_nodes) {
- // If we have already checked this node we have not found a dependency path so return false.
+VkBool32 FindDependency(const int index, const int dependent,
+ const std::vector<DAGNode> &subpass_to_node,
+ std::unordered_set<uint32_t> &processed_nodes) {
+ // If we have already checked this node we have not found a dependency path
+ // so return false.
if (processed_nodes.count(index))
return VK_FALSE;
processed_nodes.insert(index);
- const DAGNode& node = subpass_to_node[index];
- // Look for a dependency path. If one exists return true else recurse on the previous nodes.
- if (std::find(node.prev.begin(), node.prev.end(), dependent) == node.prev.end()) {
+ const DAGNode &node = subpass_to_node[index];
+ // Look for a dependency path. If one exists return true else recurse on the
+ // previous nodes.
+ if (std::find(node.prev.begin(), node.prev.end(), dependent) ==
+ node.prev.end()) {
for (auto elem : node.prev) {
- if (FindDependency(elem, dependent, subpass_to_node, processed_nodes))
+ if (FindDependency(elem, dependent, subpass_to_node,
+ processed_nodes))
return VK_TRUE;
}
} else {
@@ -5530,29 +7272,48 @@
return VK_FALSE;
}
-VkBool32 CheckDependencyExists(const layer_data* my_data, VkDevice device, const int subpass, const std::vector<uint32_t>& dependent_subpasses, const std::vector<DAGNode>& subpass_to_node, VkBool32& skip_call) {
+VkBool32 CheckDependencyExists(const layer_data *my_data, VkDevice device,
+ const int subpass,
+ const std::vector<uint32_t> &dependent_subpasses,
+ const std::vector<DAGNode> &subpass_to_node,
+ VkBool32 &skip_call) {
VkBool32 result = VK_TRUE;
- // Loop through all subpasses that share the same attachment and make sure a dependency exists
+ // Loop through all subpasses that share the same attachment and make sure a
+ // dependency exists
for (uint32_t k = 0; k < dependent_subpasses.size(); ++k) {
if (subpass == dependent_subpasses[k])
continue;
- const DAGNode& node = subpass_to_node[subpass];
- // Check for a specified dependency between the two nodes. If one exists we are done.
- auto prev_elem = std::find(node.prev.begin(), node.prev.end(), dependent_subpasses[k]);
- auto next_elem = std::find(node.next.begin(), node.next.end(), dependent_subpasses[k]);
+ const DAGNode &node = subpass_to_node[subpass];
+ // Check for a specified dependency between the two nodes. If one exists
+ // we are done.
+ auto prev_elem = std::find(node.prev.begin(), node.prev.end(),
+ dependent_subpasses[k]);
+ auto next_elem = std::find(node.next.begin(), node.next.end(),
+ dependent_subpasses[k]);
if (prev_elem == node.prev.end() && next_elem == node.next.end()) {
- // If no dependency exits an implicit dependency still might. If so, warn and if not throw an error.
+            // If no dependency exists, an implicit dependency still might. If
+            // so, warn; if not, report an error.
std::unordered_set<uint32_t> processed_nodes;
- if (FindDependency(subpass, dependent_subpasses[k], subpass_to_node, processed_nodes) ||
- FindDependency(dependent_subpasses[k], subpass, subpass_to_node, processed_nodes)) {
+ if (FindDependency(subpass, dependent_subpasses[k], subpass_to_node,
+ processed_nodes) ||
+ FindDependency(dependent_subpasses[k], subpass, subpass_to_node,
+ processed_nodes)) {
// TODO: Verify against Valid Use section of spec
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "A dependency between subpasses %d and %d must exist but only an implicit one is specified.",
- subpass, dependent_subpasses[k]);
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "A dependency between subpasses %d and %d must "
+ "exist but only an implicit one is specified.",
+ subpass, dependent_subpasses[k]);
} else {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "A dependency between subpasses %d and %d must exist but one is not specified.",
- subpass, dependent_subpasses[k]);
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "A dependency between subpasses %d and %d must "
+ "exist but one is not specified.",
+ subpass, dependent_subpasses[k]);
result = VK_FALSE;
}
}
@@ -5560,10 +7321,15 @@
return result;
}
-VkBool32 CheckPreserved(const layer_data* my_data, VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const int index, const uint32_t attachment, const std::vector<DAGNode>& subpass_to_node, int depth, VkBool32& skip_call) {
- const DAGNode& node = subpass_to_node[index];
- // If this node writes to the attachment return true as next nodes need to preserve the attachment.
- const VkSubpassDescription& subpass = pCreateInfo->pSubpasses[index];
+VkBool32 CheckPreserved(const layer_data *my_data, VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo,
+ const int index, const uint32_t attachment,
+ const std::vector<DAGNode> &subpass_to_node, int depth,
+ VkBool32 &skip_call) {
+ const DAGNode &node = subpass_to_node[index];
+ // If this node writes to the attachment return true as next nodes need to
+ // preserve the attachment.
+ const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[index];
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
if (attachment == subpass.pColorAttachments[j].attachment)
return VK_TRUE;
@@ -5574,13 +7340,16 @@
return VK_TRUE;
}
VkBool32 result = VK_FALSE;
- // Loop through previous nodes and see if any of them write to the attachment.
+ // Loop through previous nodes and see if any of them write to the
+ // attachment.
for (auto elem : node.prev) {
- result |= CheckPreserved(my_data, device, pCreateInfo, elem, attachment, subpass_to_node, depth + 1, skip_call);
+ result |= CheckPreserved(my_data, device, pCreateInfo, elem, attachment,
+ subpass_to_node, depth + 1, skip_call);
}
- // If the attachment was written to by a previous node than this node needs to preserve it.
+    // If the attachment was written to by a previous node then this node needs
+ // to preserve it.
if (result && depth > 0) {
- const VkSubpassDescription& subpass = pCreateInfo->pSubpasses[index];
+ const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[index];
VkBool32 has_preserved = VK_FALSE;
for (uint32_t j = 0; j < subpass.preserveAttachmentCount; ++j) {
if (subpass.pPreserveAttachments[j] == attachment) {
@@ -5589,61 +7358,96 @@
}
}
if (has_preserved == VK_FALSE) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "Attachment %d is used by a later subpass and must be preserved in subpass %d.", attachment, index);
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "Attachment %d is used by a later subpass and must be "
+ "preserved in subpass %d.",
+ attachment, index);
}
}
return result;
}
-VkBool32 ValidateDependencies(const layer_data* my_data, VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const std::vector<DAGNode>& subpass_to_node) {
+VkBool32 ValidateDependencies(const layer_data *my_data, VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo,
+ const std::vector<DAGNode> &subpass_to_node) {
VkBool32 skip_call = VK_FALSE;
- std::vector<std::vector<uint32_t>> output_attachment_to_subpass(pCreateInfo->attachmentCount);
- std::vector<std::vector<uint32_t>> input_attachment_to_subpass(pCreateInfo->attachmentCount);
+ std::vector<std::vector<uint32_t>> output_attachment_to_subpass(
+ pCreateInfo->attachmentCount);
+ std::vector<std::vector<uint32_t>> input_attachment_to_subpass(
+ pCreateInfo->attachmentCount);
// Find for each attachment the subpasses that use them.
for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
- const VkSubpassDescription& subpass = pCreateInfo->pSubpasses[i];
+ const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
- input_attachment_to_subpass[subpass.pInputAttachments[j].attachment].push_back(i);
+ input_attachment_to_subpass[subpass.pInputAttachments[j].attachment]
+ .push_back(i);
}
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
- output_attachment_to_subpass[subpass.pColorAttachments[j].attachment].push_back(i);
+ output_attachment_to_subpass[subpass.pColorAttachments[j]
+ .attachment].push_back(i);
}
- if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- output_attachment_to_subpass[subpass.pDepthStencilAttachment->attachment].push_back(i);
+ if (subpass.pDepthStencilAttachment &&
+ subpass.pDepthStencilAttachment->attachment !=
+ VK_ATTACHMENT_UNUSED) {
+ output_attachment_to_subpass[subpass.pDepthStencilAttachment
+ ->attachment].push_back(i);
}
}
// If there is a dependency needed make sure one exists
for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
- const VkSubpassDescription& subpass = pCreateInfo->pSubpasses[i];
- // If the attachment is an input then all subpasses that output must have a dependency relationship
+ const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i];
+ // If the attachment is an input then all subpasses that output must
+ // have a dependency relationship
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
- const uint32_t& attachment = subpass.pInputAttachments[j].attachment;
- CheckDependencyExists(my_data, device, i, output_attachment_to_subpass[attachment], subpass_to_node, skip_call);
+ const uint32_t &attachment =
+ subpass.pInputAttachments[j].attachment;
+ CheckDependencyExists(my_data, device, i,
+ output_attachment_to_subpass[attachment],
+ subpass_to_node, skip_call);
}
- // If the attachment is an output then all subpasses that use the attachment must have a dependency relationship
+ // If the attachment is an output then all subpasses that use the
+ // attachment must have a dependency relationship
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
- const uint32_t& attachment = subpass.pColorAttachments[j].attachment;
- CheckDependencyExists(my_data, device, i, output_attachment_to_subpass[attachment], subpass_to_node, skip_call);
- CheckDependencyExists(my_data, device, i, input_attachment_to_subpass[attachment], subpass_to_node, skip_call);
+ const uint32_t &attachment =
+ subpass.pColorAttachments[j].attachment;
+ CheckDependencyExists(my_data, device, i,
+ output_attachment_to_subpass[attachment],
+ subpass_to_node, skip_call);
+ CheckDependencyExists(my_data, device, i,
+ input_attachment_to_subpass[attachment],
+ subpass_to_node, skip_call);
}
- if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- const uint32_t& attachment = subpass.pDepthStencilAttachment->attachment;
- CheckDependencyExists(my_data, device, i, output_attachment_to_subpass[attachment], subpass_to_node, skip_call);
- CheckDependencyExists(my_data, device, i, input_attachment_to_subpass[attachment], subpass_to_node, skip_call);
+ if (subpass.pDepthStencilAttachment &&
+ subpass.pDepthStencilAttachment->attachment !=
+ VK_ATTACHMENT_UNUSED) {
+ const uint32_t &attachment =
+ subpass.pDepthStencilAttachment->attachment;
+ CheckDependencyExists(my_data, device, i,
+ output_attachment_to_subpass[attachment],
+ subpass_to_node, skip_call);
+ CheckDependencyExists(my_data, device, i,
+ input_attachment_to_subpass[attachment],
+ subpass_to_node, skip_call);
}
}
- // Loop through implicit dependencies, if this pass reads make sure the attachment is preserved for all passes after it was written.
+    // Loop through implicit dependencies; if this pass reads, make sure the
+ // attachment is preserved for all passes after it was written.
for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
- const VkSubpassDescription& subpass = pCreateInfo->pSubpasses[i];
+ const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
- CheckPreserved(my_data, device, pCreateInfo, i, subpass.pInputAttachments[j].attachment, subpass_to_node, 0, skip_call);
+ CheckPreserved(my_data, device, pCreateInfo, i,
+ subpass.pInputAttachments[j].attachment,
+ subpass_to_node, 0, skip_call);
}
}
return skip_call;
}
-VkBool32 ValidateLayouts(const layer_data* my_data, VkDevice device, const VkRenderPassCreateInfo* pCreateInfo) {
+VkBool32 ValidateLayouts(const layer_data *my_data, VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo) {
VkBool32 skip = VK_FALSE;
#ifdef DISABLE_IMAGE_LAYOUT_VALIDATION
@@ -5651,42 +7455,80 @@
#endif // DISABLE_IMAGE_LAYOUT_VALIDATION
for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
- const VkSubpassDescription& subpass = pCreateInfo->pSubpasses[i];
+ const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
- if (subpass.pInputAttachments[j].layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL &&
- subpass.pInputAttachments[j].layout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
- if (subpass.pInputAttachments[j].layout == VK_IMAGE_LAYOUT_GENERAL) {
- // TODO: Verify Valid Use in spec. I believe this is allowed (valid) but may not be optimal performance
- skip |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
+ if (subpass.pInputAttachments[j].layout !=
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL &&
+ subpass.pInputAttachments[j].layout !=
+ VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
+ if (subpass.pInputAttachments[j].layout ==
+ VK_IMAGE_LAYOUT_GENERAL) {
+ // TODO: Verify Valid Use in spec. I believe this is allowed
+ // (valid) but may not be optimal performance
+ skip |= log_msg(my_data->report_data,
+ VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for input attachment is GENERAL "
+ "but should be READ_ONLY_OPTIMAL.");
} else {
- skip |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for input attachment is %d but can only be READ_ONLY_OPTIMAL or GENERAL.", subpass.pInputAttachments[j].attachment);
+ skip |= log_msg(my_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for input attachment is %d but can "
+ "only be READ_ONLY_OPTIMAL or GENERAL.",
+ subpass.pInputAttachments[j].attachment);
}
}
}
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
- if (subpass.pColorAttachments[j].layout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
- if (subpass.pColorAttachments[j].layout == VK_IMAGE_LAYOUT_GENERAL) {
- // TODO: Verify Valid Use in spec. I believe this is allowed (valid) but may not be optimal performance
- skip |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
+ if (subpass.pColorAttachments[j].layout !=
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
+ if (subpass.pColorAttachments[j].layout ==
+ VK_IMAGE_LAYOUT_GENERAL) {
+ // TODO: Verify Valid Use in spec. I believe this is allowed
+ // (valid) but may not be optimal performance
+ skip |= log_msg(my_data->report_data,
+ VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for color attachment is GENERAL "
+ "but should be COLOR_ATTACHMENT_OPTIMAL.");
} else {
- skip |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for color attachment is %d but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.", subpass.pColorAttachments[j].attachment);
+ skip |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for color attachment is %d but can only be "
+ "COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
+ subpass.pColorAttachments[j].attachment);
}
}
}
if ((subpass.pDepthStencilAttachment != NULL) &&
- (subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)) {
- if (subpass.pDepthStencilAttachment->layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
- if (subpass.pDepthStencilAttachment->layout == VK_IMAGE_LAYOUT_GENERAL) {
- // TODO: Verify Valid Use in spec. I believe this is allowed (valid) but may not be optimal performance
- skip |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for depth attachment is GENERAL but should be DEPTH_STENCIL_ATTACHMENT_OPTIMAL.");
+ (subpass.pDepthStencilAttachment->attachment !=
+ VK_ATTACHMENT_UNUSED)) {
+ if (subpass.pDepthStencilAttachment->layout !=
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
+ if (subpass.pDepthStencilAttachment->layout ==
+ VK_IMAGE_LAYOUT_GENERAL) {
+ // TODO: Verify Valid Use in spec. I believe this is allowed
+ // (valid) but may not be optimal performance
+ skip |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for depth attachment is GENERAL but should be "
+ "DEPTH_STENCIL_ATTACHMENT_OPTIMAL.");
} else {
- skip |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for depth attachment is %d but can only be DEPTH_STENCIL_ATTACHMENT_OPTIMAL or GENERAL.", subpass.pDepthStencilAttachment->attachment);
+ skip |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for depth attachment is %d but can only be "
+ "DEPTH_STENCIL_ATTACHMENT_OPTIMAL or GENERAL.",
+ subpass.pDepthStencilAttachment->attachment);
}
}
}
@@ -5694,163 +7536,209 @@
return skip;
}
-VkBool32 CreatePassDAG(const layer_data* my_data, VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, std::vector<DAGNode>& subpass_to_node, std::vector<bool>& has_self_dependency) {
+VkBool32 CreatePassDAG(const layer_data *my_data, VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo,
+ std::vector<DAGNode> &subpass_to_node,
+ std::vector<bool> &has_self_dependency) {
VkBool32 skip_call = VK_FALSE;
for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
- DAGNode& subpass_node = subpass_to_node[i];
+ DAGNode &subpass_node = subpass_to_node[i];
subpass_node.pass = i;
}
for (uint32_t i = 0; i < pCreateInfo->dependencyCount; ++i) {
- const VkSubpassDependency& dependency = pCreateInfo->pDependencies[i];
- if (dependency.srcSubpass > dependency.dstSubpass && dependency.srcSubpass != VK_SUBPASS_EXTERNAL && dependency.dstSubpass != VK_SUBPASS_EXTERNAL) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "Depedency graph must be specified such that an earlier pass cannot depend on a later pass.");
- } else if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL && dependency.dstSubpass == VK_SUBPASS_EXTERNAL) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "The src and dest subpasses cannot both be external.");
+ const VkSubpassDependency &dependency = pCreateInfo->pDependencies[i];
+ if (dependency.srcSubpass > dependency.dstSubpass &&
+ dependency.srcSubpass != VK_SUBPASS_EXTERNAL &&
+ dependency.dstSubpass != VK_SUBPASS_EXTERNAL) {
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+                        "Dependency graph must be specified such that an "
+ "earlier pass cannot depend on a later pass.");
+ } else if (dependency.srcSubpass == VK_SUBPASS_EXTERNAL &&
+ dependency.dstSubpass == VK_SUBPASS_EXTERNAL) {
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "The src and dest subpasses cannot both be external.");
} else if (dependency.srcSubpass == dependency.dstSubpass) {
has_self_dependency[dependency.srcSubpass] = true;
}
if (dependency.dstSubpass != VK_SUBPASS_EXTERNAL) {
- subpass_to_node[dependency.dstSubpass].prev.push_back(dependency.srcSubpass);
+ subpass_to_node[dependency.dstSubpass].prev.push_back(
+ dependency.srcSubpass);
}
if (dependency.srcSubpass != VK_SUBPASS_EXTERNAL) {
- subpass_to_node[dependency.srcSubpass].next.push_back(dependency.dstSubpass);
+ subpass_to_node[dependency.srcSubpass].next.push_back(
+ dependency.dstSubpass);
}
}
return skip_call;
}
// TODOSC : Add intercept of vkCreateShaderModule
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo *pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule *pShaderModule)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateShaderModule(VkDevice device,
+ const VkShaderModuleCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkShaderModule *pShaderModule) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skip_call = VK_FALSE;
if (!shader_is_spirv(pCreateInfo)) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- /* dev */ 0, __LINE__, SHADER_CHECKER_NON_SPIRV_SHADER, "SC",
- "Shader is not SPIR-V");
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ /* dev */ 0, __LINE__, SHADER_CHECKER_NON_SPIRV_SHADER,
+ "SC", "Shader is not SPIR-V");
}
if (VK_FALSE != skip_call)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult res = my_data->device_dispatch_table->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+ VkResult res = my_data->device_dispatch_table->CreateShaderModule(
+ device, pCreateInfo, pAllocator, pShaderModule);
if (res == VK_SUCCESS) {
loader_platform_thread_lock_mutex(&globalLock);
- my_data->shaderModuleMap[*pShaderModule] = new shader_module(pCreateInfo);
+ my_data->shaderModuleMap[*pShaderModule] =
+ new shader_module(pCreateInfo);
loader_platform_thread_unlock_mutex(&globalLock);
}
return res;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateRenderPass(VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkRenderPass *pRenderPass) {
VkBool32 skip_call = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
// Create DAG
std::vector<bool> has_self_dependency(pCreateInfo->subpassCount);
std::vector<DAGNode> subpass_to_node(pCreateInfo->subpassCount);
- skip_call |= CreatePassDAG(dev_data, device, pCreateInfo, subpass_to_node, has_self_dependency);
+ skip_call |= CreatePassDAG(dev_data, device, pCreateInfo, subpass_to_node,
+ has_self_dependency);
// Validate using DAG
- skip_call |= ValidateDependencies(dev_data, device, pCreateInfo, subpass_to_node);
+ skip_call |=
+ ValidateDependencies(dev_data, device, pCreateInfo, subpass_to_node);
skip_call |= ValidateLayouts(dev_data, device, pCreateInfo);
if (VK_FALSE != skip_call) {
return VK_ERROR_VALIDATION_FAILED_EXT;
}
- VkResult result = dev_data->device_dispatch_table->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+ VkResult result = dev_data->device_dispatch_table->CreateRenderPass(
+ device, pCreateInfo, pAllocator, pRenderPass);
if (VK_SUCCESS == result) {
// TODOSC : Merge in tracking of renderpass from ShaderChecker
// Shadow create info and store in map
- VkRenderPassCreateInfo* localRPCI = new VkRenderPassCreateInfo(*pCreateInfo);
+ VkRenderPassCreateInfo *localRPCI =
+ new VkRenderPassCreateInfo(*pCreateInfo);
if (pCreateInfo->pAttachments) {
- localRPCI->pAttachments = new VkAttachmentDescription[localRPCI->attachmentCount];
- memcpy((void*)localRPCI->pAttachments, pCreateInfo->pAttachments, localRPCI->attachmentCount*sizeof(VkAttachmentDescription));
+ localRPCI->pAttachments =
+ new VkAttachmentDescription[localRPCI->attachmentCount];
+ memcpy((void *)localRPCI->pAttachments, pCreateInfo->pAttachments,
+ localRPCI->attachmentCount *
+ sizeof(VkAttachmentDescription));
}
if (pCreateInfo->pSubpasses) {
- localRPCI->pSubpasses = new VkSubpassDescription[localRPCI->subpassCount];
- memcpy((void*)localRPCI->pSubpasses, pCreateInfo->pSubpasses, localRPCI->subpassCount*sizeof(VkSubpassDescription));
+ localRPCI->pSubpasses =
+ new VkSubpassDescription[localRPCI->subpassCount];
+ memcpy((void *)localRPCI->pSubpasses, pCreateInfo->pSubpasses,
+ localRPCI->subpassCount * sizeof(VkSubpassDescription));
for (uint32_t i = 0; i < localRPCI->subpassCount; i++) {
- VkSubpassDescription *subpass = (VkSubpassDescription *) &localRPCI->pSubpasses[i];
- const uint32_t attachmentCount = subpass->inputAttachmentCount +
- subpass->colorAttachmentCount * (1 + (subpass->pResolveAttachments?1:0)) +
- ((subpass->pDepthStencilAttachment) ? 1 : 0) + subpass->preserveAttachmentCount;
- VkAttachmentReference *attachments = new VkAttachmentReference[attachmentCount];
+ VkSubpassDescription *subpass =
+ (VkSubpassDescription *)&localRPCI->pSubpasses[i];
+ const uint32_t attachmentCount =
+ subpass->inputAttachmentCount +
+ subpass->colorAttachmentCount *
+ (1 + (subpass->pResolveAttachments ? 1 : 0)) +
+ ((subpass->pDepthStencilAttachment) ? 1 : 0) +
+ subpass->preserveAttachmentCount;
+ VkAttachmentReference *attachments =
+ new VkAttachmentReference[attachmentCount];
memcpy(attachments, subpass->pInputAttachments,
- sizeof(attachments[0]) * subpass->inputAttachmentCount);
+ sizeof(attachments[0]) * subpass->inputAttachmentCount);
subpass->pInputAttachments = attachments;
attachments += subpass->inputAttachmentCount;
memcpy(attachments, subpass->pColorAttachments,
- sizeof(attachments[0]) * subpass->colorAttachmentCount);
+ sizeof(attachments[0]) * subpass->colorAttachmentCount);
subpass->pColorAttachments = attachments;
attachments += subpass->colorAttachmentCount;
if (subpass->pResolveAttachments) {
memcpy(attachments, subpass->pResolveAttachments,
- sizeof(attachments[0]) * subpass->colorAttachmentCount);
+ sizeof(attachments[0]) *
+ subpass->colorAttachmentCount);
subpass->pResolveAttachments = attachments;
attachments += subpass->colorAttachmentCount;
}
if (subpass->pDepthStencilAttachment) {
memcpy(attachments, subpass->pDepthStencilAttachment,
- sizeof(attachments[0]) * 1);
+ sizeof(attachments[0]) * 1);
subpass->pDepthStencilAttachment = attachments;
attachments += 1;
}
memcpy(attachments, subpass->pPreserveAttachments,
- sizeof(attachments[0]) * subpass->preserveAttachmentCount);
+ sizeof(attachments[0]) *
+ subpass->preserveAttachmentCount);
subpass->pPreserveAttachments = &attachments->attachment;
}
}
if (pCreateInfo->pDependencies) {
- localRPCI->pDependencies = new VkSubpassDependency[localRPCI->dependencyCount];
- memcpy((void*)localRPCI->pDependencies, pCreateInfo->pDependencies, localRPCI->dependencyCount*sizeof(VkSubpassDependency));
+ localRPCI->pDependencies =
+ new VkSubpassDependency[localRPCI->dependencyCount];
+ memcpy((void *)localRPCI->pDependencies, pCreateInfo->pDependencies,
+ localRPCI->dependencyCount * sizeof(VkSubpassDependency));
}
loader_platform_thread_lock_mutex(&globalLock);
dev_data->renderPassMap[*pRenderPass] = new RENDER_PASS_NODE(localRPCI);
- dev_data->renderPassMap[*pRenderPass]->hasSelfDependency = has_self_dependency;
+ dev_data->renderPassMap[*pRenderPass]->hasSelfDependency =
+ has_self_dependency;
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
// Free the renderpass shadow
-static void deleteRenderPasses(layer_data* my_data)
-{
+static void deleteRenderPasses(layer_data *my_data) {
if (my_data->renderPassMap.size() <= 0)
return;
- for (auto ii=my_data->renderPassMap.begin(); ii!=my_data->renderPassMap.end(); ++ii) {
- const VkRenderPassCreateInfo* pRenderPassInfo = (*ii).second->pCreateInfo;
+ for (auto ii = my_data->renderPassMap.begin();
+ ii != my_data->renderPassMap.end(); ++ii) {
+ const VkRenderPassCreateInfo *pRenderPassInfo =
+ (*ii).second->pCreateInfo;
if (pRenderPassInfo->pAttachments) {
- delete[] pRenderPassInfo->pAttachments;
+            delete[] pRenderPassInfo->pAttachments;
}
if (pRenderPassInfo->pSubpasses) {
- for (uint32_t i=0; i<pRenderPassInfo->subpassCount; ++i) {
+ for (uint32_t i = 0; i < pRenderPassInfo->subpassCount; ++i) {
                // Attachments are all allocated in a block, so just need to
// find the first non-null one to delete
if (pRenderPassInfo->pSubpasses[i].pInputAttachments) {
- delete[] pRenderPassInfo->pSubpasses[i].pInputAttachments;
+                delete[] pRenderPassInfo->pSubpasses[i].pInputAttachments;
} else if (pRenderPassInfo->pSubpasses[i].pColorAttachments) {
- delete[] pRenderPassInfo->pSubpasses[i].pColorAttachments;
+                delete[] pRenderPassInfo->pSubpasses[i].pColorAttachments;
} else if (pRenderPassInfo->pSubpasses[i].pResolveAttachments) {
- delete[] pRenderPassInfo->pSubpasses[i].pResolveAttachments;
- } else if (pRenderPassInfo->pSubpasses[i].pPreserveAttachments) {
- delete[] pRenderPassInfo->pSubpasses[i].pPreserveAttachments;
+                delete[] pRenderPassInfo->pSubpasses[i].pResolveAttachments;
+            } else if (pRenderPassInfo->pSubpasses[i].pPreserveAttachments) {
+                delete[] pRenderPassInfo->pSubpasses[i].pPreserveAttachments;
}
}
- delete[] pRenderPassInfo->pSubpasses;
+        delete[] pRenderPassInfo->pSubpasses;
}
if (pRenderPassInfo->pDependencies) {
- delete[] pRenderPassInfo->pDependencies;
+            delete[] pRenderPassInfo->pDependencies;
}
delete pRenderPassInfo;
delete (*ii).second;
@@ -5858,123 +7746,178 @@
my_data->renderPassMap.clear();
}
-VkBool32 VerifyFramebufferAndRenderPassLayouts(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin) {
+VkBool32 VerifyFramebufferAndRenderPassLayouts(
+ VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo *pRenderPassBegin) {
VkBool32 skip_call = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
- const VkRenderPassCreateInfo* pRenderPassInfo = dev_data->renderPassMap[pRenderPassBegin->renderPass]->pCreateInfo;
- const VkFramebufferCreateInfo* pFramebufferInfo = dev_data->frameBufferMap[pRenderPassBegin->framebuffer];
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
+ const VkRenderPassCreateInfo *pRenderPassInfo =
+ dev_data->renderPassMap[pRenderPassBegin->renderPass]->pCreateInfo;
+ const VkFramebufferCreateInfo *pFramebufferInfo =
+ dev_data->frameBufferMap[pRenderPassBegin->framebuffer];
if (pRenderPassInfo->attachmentCount != pFramebufferInfo->attachmentCount) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "You cannot start a render pass using a framebuffer with a different number of attachments.");
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "You cannot start a render pass using a framebuffer with a "
+ "different number of attachments.");
}
for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
- const VkImageView& image_view = pFramebufferInfo->pAttachments[i];
- const VkImage& image = dev_data->imageViewMap[image_view]->image;
+ const VkImageView &image_view = pFramebufferInfo->pAttachments[i];
+ const VkImage &image = dev_data->imageViewMap[image_view]->image;
auto image_data = pCB->imageLayoutMap.find(image);
if (image_data == pCB->imageLayoutMap.end()) {
- pCB->imageLayoutMap[image].initialLayout = pRenderPassInfo->pAttachments[i].initialLayout;
- pCB->imageLayoutMap[image].layout = pRenderPassInfo->pAttachments[i].initialLayout;
- } else if (pRenderPassInfo->pAttachments[i].initialLayout != image_data->second.layout) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "You cannot start a render pass using attachment %i where the intial layout differs from the starting layout.", i);
+ pCB->imageLayoutMap[image].initialLayout =
+ pRenderPassInfo->pAttachments[i].initialLayout;
+ pCB->imageLayoutMap[image].layout =
+ pRenderPassInfo->pAttachments[i].initialLayout;
+ } else if (pRenderPassInfo->pAttachments[i].initialLayout !=
+ image_data->second.layout) {
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "You cannot start a render pass using attachment %i where the "
+                "initial layout differs from the starting layout.",
+ i);
}
}
return skip_call;
}
-void TransitionSubpassLayouts(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const int subpass_index) {
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
- auto render_pass_data = dev_data->renderPassMap.find(pRenderPassBegin->renderPass);
+void TransitionSubpassLayouts(VkCommandBuffer cmdBuffer,
+ const VkRenderPassBeginInfo *pRenderPassBegin,
+ const int subpass_index) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
+ auto render_pass_data =
+ dev_data->renderPassMap.find(pRenderPassBegin->renderPass);
if (render_pass_data == dev_data->renderPassMap.end()) {
return;
}
- const VkRenderPassCreateInfo* pRenderPassInfo = render_pass_data->second->pCreateInfo;
- auto framebuffer_data = dev_data->frameBufferMap.find(pRenderPassBegin->framebuffer);
+ const VkRenderPassCreateInfo *pRenderPassInfo =
+ render_pass_data->second->pCreateInfo;
+ auto framebuffer_data =
+ dev_data->frameBufferMap.find(pRenderPassBegin->framebuffer);
if (framebuffer_data == dev_data->frameBufferMap.end()) {
return;
}
- const VkFramebufferCreateInfo* pFramebufferInfo = framebuffer_data->second;
- const VkSubpassDescription& subpass = pRenderPassInfo->pSubpasses[subpass_index];
+ const VkFramebufferCreateInfo *pFramebufferInfo = framebuffer_data->second;
+ const VkSubpassDescription &subpass =
+ pRenderPassInfo->pSubpasses[subpass_index];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
- const VkImageView& image_view = pFramebufferInfo->pAttachments[subpass.pInputAttachments[j].attachment];
+ const VkImageView &image_view =
+ pFramebufferInfo
+ ->pAttachments[subpass.pInputAttachments[j].attachment];
auto image_view_data = dev_data->imageViewMap.find(image_view);
- if (image_view_data != dev_data->imageViewMap.end()) {
- auto image_layout = pCB->imageLayoutMap.find(image_view_data->second->image);
+ if (image_view_data != dev_data->imageViewMap.end()) {
+ auto image_layout =
+ pCB->imageLayoutMap.find(image_view_data->second->image);
if (image_layout != pCB->imageLayoutMap.end()) {
- image_layout->second.layout = subpass.pInputAttachments[j].layout;
+ image_layout->second.layout =
+ subpass.pInputAttachments[j].layout;
}
}
}
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
- const VkImageView& image_view = pFramebufferInfo->pAttachments[subpass.pColorAttachments[j].attachment];
+ const VkImageView &image_view =
+ pFramebufferInfo
+ ->pAttachments[subpass.pColorAttachments[j].attachment];
auto image_view_data = dev_data->imageViewMap.find(image_view);
- if (image_view_data != dev_data->imageViewMap.end()) {
- auto image_layout = pCB->imageLayoutMap.find(image_view_data->second->image);
+ if (image_view_data != dev_data->imageViewMap.end()) {
+ auto image_layout =
+ pCB->imageLayoutMap.find(image_view_data->second->image);
if (image_layout != pCB->imageLayoutMap.end()) {
- image_layout->second.layout = subpass.pColorAttachments[j].layout;
+ image_layout->second.layout =
+ subpass.pColorAttachments[j].layout;
}
}
}
if ((subpass.pDepthStencilAttachment != NULL) &&
(subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED)) {
- const VkImageView& image_view = pFramebufferInfo->pAttachments[subpass.pDepthStencilAttachment->attachment];
+ const VkImageView &image_view =
+ pFramebufferInfo
+ ->pAttachments[subpass.pDepthStencilAttachment->attachment];
auto image_view_data = dev_data->imageViewMap.find(image_view);
- if (image_view_data != dev_data->imageViewMap.end()) {
- auto image_layout = pCB->imageLayoutMap.find(image_view_data->second->image);
+ if (image_view_data != dev_data->imageViewMap.end()) {
+ auto image_layout =
+ pCB->imageLayoutMap.find(image_view_data->second->image);
if (image_layout != pCB->imageLayoutMap.end()) {
- image_layout->second.layout = subpass.pDepthStencilAttachment->layout;
+ image_layout->second.layout =
+ subpass.pDepthStencilAttachment->layout;
}
}
}
}
-VkBool32 validatePrimaryCommandBuffer(const layer_data* my_data, const GLOBAL_CB_NODE* pCB, const std::string& cmd_name) {
+VkBool32 validatePrimaryCommandBuffer(const layer_data *my_data,
+ const GLOBAL_CB_NODE *pCB,
+ const std::string &cmd_name) {
VkBool32 skip_call = VK_FALSE;
if (pCB->createInfo.level != VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
- "Cannot execute command %s on a secondary command buffer.", cmd_name.c_str());
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_COMMAND_BUFFER, "DS",
+ "Cannot execute command %s on a secondary command buffer.",
+ cmd_name.c_str());
}
return skip_call;
}
-void TransitionFinalSubpassLayouts(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin) {
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, cmdBuffer);
- auto render_pass_data = dev_data->renderPassMap.find(pRenderPassBegin->renderPass);
+void
+TransitionFinalSubpassLayouts(VkCommandBuffer cmdBuffer,
+ const VkRenderPassBeginInfo *pRenderPassBegin) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, cmdBuffer);
+ auto render_pass_data =
+ dev_data->renderPassMap.find(pRenderPassBegin->renderPass);
if (render_pass_data == dev_data->renderPassMap.end()) {
return;
}
- const VkRenderPassCreateInfo* pRenderPassInfo = render_pass_data->second->pCreateInfo;
- auto framebuffer_data = dev_data->frameBufferMap.find(pRenderPassBegin->framebuffer);
+ const VkRenderPassCreateInfo *pRenderPassInfo =
+ render_pass_data->second->pCreateInfo;
+ auto framebuffer_data =
+ dev_data->frameBufferMap.find(pRenderPassBegin->framebuffer);
if (framebuffer_data == dev_data->frameBufferMap.end()) {
return;
}
- const VkFramebufferCreateInfo* pFramebufferInfo = framebuffer_data->second;
+ const VkFramebufferCreateInfo *pFramebufferInfo = framebuffer_data->second;
for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
- const VkImageView& image_view = pFramebufferInfo->pAttachments[i];
+ const VkImageView &image_view = pFramebufferInfo->pAttachments[i];
auto image_view_data = dev_data->imageViewMap.find(image_view);
- if (image_view_data != dev_data->imageViewMap.end()) {
- auto image_layout = pCB->imageLayoutMap.find(image_view_data->second->image);
+ if (image_view_data != dev_data->imageViewMap.end()) {
+ auto image_layout =
+ pCB->imageLayoutMap.find(image_view_data->second->image);
if (image_layout != pCB->imageLayoutMap.end()) {
- image_layout->second.layout = pRenderPassInfo->pAttachments[i].finalLayout;
+ image_layout->second.layout =
+ pRenderPassInfo->pAttachments[i].finalLayout;
}
}
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,
+ const VkRenderPassBeginInfo *pRenderPassBegin,
+ VkSubpassContents contents) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
if (pRenderPassBegin && pRenderPassBegin->renderPass) {
- skipCall |= VerifyFramebufferAndRenderPassLayouts(commandBuffer, pRenderPassBegin);
+ skipCall |= VerifyFramebufferAndRenderPassLayouts(commandBuffer,
+ pRenderPassBegin);
skipCall |= insideRenderPass(dev_data, pCB, "vkCmdBeginRenderPass");
- skipCall |= validatePrimaryCommandBuffer(dev_data, pCB, "vkCmdBeginRenderPass");
- skipCall |= addCmd(dev_data, pCB, CMD_BEGINRENDERPASS, "vkCmdBeginRenderPass()");
+ skipCall |= validatePrimaryCommandBuffer(dev_data, pCB,
+ "vkCmdBeginRenderPass");
+ skipCall |= addCmd(dev_data, pCB, CMD_BEGINRENDERPASS,
+ "vkCmdBeginRenderPass()");
pCB->activeRenderPass = pRenderPassBegin->renderPass;
// This is a shallow copy as that is all that is needed for now
pCB->activeRenderPassBeginInfo = *pRenderPassBegin;
@@ -5982,50 +7925,69 @@
pCB->activeSubpassContents = contents;
pCB->framebuffer = pRenderPassBegin->framebuffer;
} else {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
- "You cannot use a NULL RenderPass object in vkCmdBeginRenderPass()");
+ skipCall |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "You cannot use a NULL RenderPass object in "
+ "vkCmdBeginRenderPass()");
}
}
if (VK_FALSE == skipCall) {
- dev_data->device_dispatch_table->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+ dev_data->device_dispatch_table->CmdBeginRenderPass(
+ commandBuffer, pRenderPassBegin, contents);
// This is a shallow copy as that is all that is needed for now
dev_data->renderPassBeginInfo = *pRenderPassBegin;
dev_data->currentSubpass = 0;
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdNextSubpass(VkCommandBuffer commandBuffer,
+ VkSubpassContents contents) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
- TransitionSubpassLayouts(commandBuffer, &dev_data->renderPassBeginInfo, ++dev_data->currentSubpass);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
+ TransitionSubpassLayouts(commandBuffer, &dev_data->renderPassBeginInfo,
+ ++dev_data->currentSubpass);
if (pCB) {
- skipCall |= validatePrimaryCommandBuffer(dev_data, pCB, "vkCmdNextSubpass");
- skipCall |= addCmd(dev_data, pCB, CMD_NEXTSUBPASS, "vkCmdNextSubpass()");
+ skipCall |=
+ validatePrimaryCommandBuffer(dev_data, pCB, "vkCmdNextSubpass");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_NEXTSUBPASS, "vkCmdNextSubpass()");
pCB->activeSubpass++;
pCB->activeSubpassContents = contents;
- TransitionSubpassLayouts(commandBuffer, &pCB->activeRenderPassBeginInfo, pCB->activeSubpass);
+ TransitionSubpassLayouts(commandBuffer, &pCB->activeRenderPassBeginInfo,
+ pCB->activeSubpass);
if (pCB->lastBoundPipeline) {
- skipCall |= validatePipelineState(dev_data, pCB, VK_PIPELINE_BIND_POINT_GRAPHICS, pCB->lastBoundPipeline);
+ skipCall |= validatePipelineState(dev_data, pCB,
+ VK_PIPELINE_BIND_POINT_GRAPHICS,
+ pCB->lastBoundPipeline);
}
skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdNextSubpass");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdNextSubpass(commandBuffer, contents);
+ dev_data->device_dispatch_table->CmdNextSubpass(commandBuffer,
+ contents);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdEndRenderPass(VkCommandBuffer commandBuffer) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
- TransitionFinalSubpassLayouts(commandBuffer, &dev_data->renderPassBeginInfo);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
+ TransitionFinalSubpassLayouts(commandBuffer,
+ &dev_data->renderPassBeginInfo);
if (pCB) {
        skipCall |= outsideRenderPass(dev_data, pCB, "vkCmdEndRenderPass");
- skipCall |= validatePrimaryCommandBuffer(dev_data, pCB, "vkCmdEndRenderPass");
- skipCall |= addCmd(dev_data, pCB, CMD_ENDRENDERPASS, "vkCmdEndRenderPass()");
- TransitionFinalSubpassLayouts(commandBuffer, &pCB->activeRenderPassBeginInfo);
+ skipCall |=
+ validatePrimaryCommandBuffer(dev_data, pCB, "vkCmdEndRenderPass");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_ENDRENDERPASS, "vkCmdEndRenderPass()");
+ TransitionFinalSubpassLayouts(commandBuffer,
+ &pCB->activeRenderPassBeginInfo);
pCB->activeRenderPass = 0;
pCB->activeSubpass = 0;
}
@@ -6033,91 +7995,179 @@
dev_data->device_dispatch_table->CmdEndRenderPass(commandBuffer);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBuffersCount, const VkCommandBuffer* pCommandBuffers)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdExecuteCommands(VkCommandBuffer commandBuffer,
+ uint32_t commandBuffersCount,
+ const VkCommandBuffer *pCommandBuffers) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (pCB) {
- GLOBAL_CB_NODE* pSubCB = NULL;
- for (uint32_t i=0; i<commandBuffersCount; i++) {
+ GLOBAL_CB_NODE *pSubCB = NULL;
+ for (uint32_t i = 0; i < commandBuffersCount; i++) {
pSubCB = getCBNode(dev_data, pCommandBuffers[i]);
if (!pSubCB) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, "DS",
- "vkCmdExecuteCommands() called w/ invalid Cmd Buffer %p in element %u of pCommandBuffers array.", (void*)pCommandBuffers[i], i);
- } else if (VK_COMMAND_BUFFER_LEVEL_PRIMARY == pSubCB->createInfo.level) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, "DS",
- "vkCmdExecuteCommands() called w/ Primary Cmd Buffer %p in element %u of pCommandBuffers array. All cmd buffers in pCommandBuffers array must be secondary.", (void*)pCommandBuffers[i], i);
- } else if (pCB->activeRenderPass) { // Secondary CB w/i RenderPass must have *CONTINUE_BIT set
- if (!(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)pCommandBuffers[i], __LINE__, DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
- "vkCmdExecuteCommands(): Secondary Command Buffer (%p) executed within render pass (%#" PRIxLEAST64 ") must have had vkBeginCommandBuffer() called w/ VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set.", (void*)pCommandBuffers[i], (uint64_t)pCB->activeRenderPass);
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, "DS",
+ "vkCmdExecuteCommands() called w/ invalid Cmd Buffer %p in "
+ "element %u of pCommandBuffers array.",
+ (void *)pCommandBuffers[i], i);
+ } else if (VK_COMMAND_BUFFER_LEVEL_PRIMARY ==
+ pSubCB->createInfo.level) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, "DS",
+ "vkCmdExecuteCommands() called w/ Primary Cmd Buffer %p in "
+ "element %u of pCommandBuffers array. All cmd buffers in "
+ "pCommandBuffers array must be secondary.",
+ (void *)pCommandBuffers[i], i);
+ } else if (pCB->activeRenderPass) { // Secondary CB w/i RenderPass
+ // must have *CONTINUE_BIT set
+ if (!(pSubCB->beginInfo.flags &
+ VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)pCommandBuffers[i], __LINE__,
+ DRAWSTATE_BEGIN_CB_INVALID_STATE, "DS",
+ "vkCmdExecuteCommands(): Secondary Command Buffer (%p) "
+ "executed within render pass (%#" PRIxLEAST64
+ ") must have had vkBeginCommandBuffer() called w/ "
+ "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set.",
+ (void *)pCommandBuffers[i],
+ (uint64_t)pCB->activeRenderPass);
}
string errorString = "";
- if (!verify_renderpass_compatibility(dev_data, pCB->activeRenderPass, pSubCB->beginInfo.pInheritanceInfo->renderPass, errorString)) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)pCommandBuffers[i], __LINE__, DRAWSTATE_RENDERPASS_INCOMPATIBLE, "DS",
- "vkCmdExecuteCommands(): Secondary Command Buffer (%p) w/ render pass (%#" PRIxLEAST64 ") is incompatible w/ primary command buffer (%p) w/ render pass (%#" PRIxLEAST64 ") due to: %s",
- (void*)pCommandBuffers[i], (uint64_t)pSubCB->beginInfo.pInheritanceInfo->renderPass, (void*)commandBuffer, (uint64_t)pCB->activeRenderPass, errorString.c_str());
+ if (!verify_renderpass_compatibility(
+ dev_data, pCB->activeRenderPass,
+ pSubCB->beginInfo.pInheritanceInfo->renderPass,
+ errorString)) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)pCommandBuffers[i], __LINE__,
+ DRAWSTATE_RENDERPASS_INCOMPATIBLE, "DS",
+ "vkCmdExecuteCommands(): Secondary Command Buffer (%p) "
+ "w/ render pass (%#" PRIxLEAST64
+ ") is incompatible w/ primary command buffer (%p) w/ "
+ "render pass (%#" PRIxLEAST64 ") due to: %s",
+ (void *)pCommandBuffers[i],
+ (uint64_t)
+ pSubCB->beginInfo.pInheritanceInfo->renderPass,
+ (void *)commandBuffer, (uint64_t)pCB->activeRenderPass,
+ errorString.c_str());
}
- // If framebuffer for secondary CB is not NULL, then it must match FB from vkCmdBeginRenderPass()
- // that this CB will be executed in AND framebuffer must have been created w/ RP compatible w/ renderpass
+ // If framebuffer for secondary CB is not NULL, then it must
+ // match FB from vkCmdBeginRenderPass()
+ // that this CB will be executed in AND framebuffer must have
+ // been created w/ RP compatible w/ renderpass
if (pSubCB->beginInfo.pInheritanceInfo->framebuffer) {
- if (pSubCB->beginInfo.pInheritanceInfo->framebuffer != pCB->activeRenderPassBeginInfo.framebuffer) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)pCommandBuffers[i], __LINE__, DRAWSTATE_FRAMEBUFFER_INCOMPATIBLE, "DS",
- "vkCmdExecuteCommands(): Secondary Command Buffer (%p) references framebuffer (%#" PRIxLEAST64 ") that does not match framebuffer (%#" PRIxLEAST64 ") in active renderpass (%#" PRIxLEAST64 ").",
- (void*)pCommandBuffers[i], (uint64_t)pSubCB->beginInfo.pInheritanceInfo->framebuffer, (uint64_t)pCB->activeRenderPassBeginInfo.framebuffer, (uint64_t)pCB->activeRenderPass);
+ if (pSubCB->beginInfo.pInheritanceInfo->framebuffer !=
+ pCB->activeRenderPassBeginInfo.framebuffer) {
+ skipCall |= log_msg(
+ dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)pCommandBuffers[i], __LINE__,
+ DRAWSTATE_FRAMEBUFFER_INCOMPATIBLE, "DS",
+ "vkCmdExecuteCommands(): Secondary Command Buffer "
+ "(%p) references framebuffer (%#" PRIxLEAST64
+ ") that does not match framebuffer (%#" PRIxLEAST64
+ ") in active renderpass (%#" PRIxLEAST64 ").",
+ (void *)pCommandBuffers[i],
+ (uint64_t)
+ pSubCB->beginInfo.pInheritanceInfo->framebuffer,
+ (uint64_t)
+ pCB->activeRenderPassBeginInfo.framebuffer,
+ (uint64_t)pCB->activeRenderPass);
}
}
}
- // Secondary cmdBuffers are considered pending execution starting w/ being recorded
- if (!(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
- if (dev_data->globalInFlightCmdBuffers.find(pSubCB->commandBuffer) != dev_data->globalInFlightCmdBuffers.end()) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, "DS",
- "Attempt to simultaneously execute CB %#" PRIxLEAST64 " w/o VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set!", (uint64_t)(pCB->commandBuffer));
+ // Secondary cmdBuffers are considered pending execution starting w/
+ // being recorded
+ if (!(pSubCB->beginInfo.flags &
+ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
+ if (dev_data->globalInFlightCmdBuffers.find(
+ pSubCB->commandBuffer) !=
+ dev_data->globalInFlightCmdBuffers.end()) {
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)(pCB->commandBuffer), __LINE__,
+ DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, "DS",
+ "Attempt to simultaneously execute CB %#" PRIxLEAST64
+ " w/o VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT "
+ "set!",
+ (uint64_t)(pCB->commandBuffer));
}
- if (pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
- // Warn that non-simultaneous secondary cmd buffer renders primary non-simultaneous
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCommandBuffers[i]), __LINE__, DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, "DS",
- "vkCmdExecuteCommands(): Secondary Command Buffer (%#" PRIxLEAST64 ") does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set and will cause primary command buffer (%#" PRIxLEAST64 ") to be treated as if it does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set, even though it does.",
- (uint64_t)(pCommandBuffers[i]), (uint64_t)(pCB->commandBuffer));
- pCB->beginInfo.flags &= ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
+ if (pCB->beginInfo.flags &
+ VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
+ // Warn that non-simultaneous secondary cmd buffer renders
+ // primary non-simultaneous
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)(pCommandBuffers[i]), __LINE__,
+ DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, "DS",
+ "vkCmdExecuteCommands(): Secondary Command Buffer "
+ "(%#" PRIxLEAST64
+ ") does not have "
+ "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set and "
+ "will cause primary command buffer (%#" PRIxLEAST64
+ ") to be treated as if it does not have "
+ "VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set, "
+ "even though it does.",
+ (uint64_t)(pCommandBuffers[i]),
+ (uint64_t)(pCB->commandBuffer));
+ pCB->beginInfo.flags &=
+ ~VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
}
}
pCB->secondaryCommandBuffers.insert(pSubCB->commandBuffer);
dev_data->globalInFlightCmdBuffers.insert(pSubCB->commandBuffer);
}
- skipCall |= validatePrimaryCommandBuffer(dev_data, pCB, "vkCmdExecuteComands");
- skipCall |= addCmd(dev_data, pCB, CMD_EXECUTECOMMANDS, "vkCmdExecuteComands()");
+ skipCall |=
+            validatePrimaryCommandBuffer(dev_data, pCB, "vkCmdExecuteCommands");
+ skipCall |=
+            addCmd(dev_data, pCB, CMD_EXECUTECOMMANDS, "vkCmdExecuteCommands()");
}
if (VK_FALSE == skipCall)
- dev_data->device_dispatch_table->CmdExecuteCommands(commandBuffer, commandBuffersCount, pCommandBuffers);
+ dev_data->device_dispatch_table->CmdExecuteCommands(
+ commandBuffer, commandBuffersCount, pCommandBuffers);
}
VkBool32 ValidateMapImageLayouts(VkDevice device, VkDeviceMemory mem) {
VkBool32 skip_call = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
auto mem_data = dev_data->memImageMap.find(mem);
if (mem_data != dev_data->memImageMap.end()) {
auto image_data = dev_data->imageLayoutMap.find(mem_data->second);
if (image_data != dev_data->imageLayoutMap.end()) {
- if (image_data->second->layout != VK_IMAGE_LAYOUT_PREINITIALIZED && image_data->second->layout != VK_IMAGE_LAYOUT_GENERAL) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Cannot map an image with layout %d. Only GENERAL or PREINITIALIZED are supported.", image_data->second->layout);
+ if (image_data->second->layout != VK_IMAGE_LAYOUT_PREINITIALIZED &&
+ image_data->second->layout != VK_IMAGE_LAYOUT_GENERAL) {
+ skip_call |= log_msg(dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot map an image with layout %d. Only "
+ "GENERAL or PREINITIALIZED are supported.",
+ image_data->second->layout);
}
}
}
return skip_call;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
- VkDevice device,
- VkDeviceMemory mem,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkFlags flags,
- void **ppData)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
+ VkDeviceSize size, VkFlags flags, void **ppData) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skip_call = VK_FALSE;
#ifndef DISABLE_IMAGE_LAYOUT_VALIDATION
@@ -6125,86 +8175,90 @@
#endif // DISABLE_IMAGE_LAYOUT_VALIDATION
if (VK_FALSE == skip_call) {
- return dev_data->device_dispatch_table->MapMemory(device, mem, offset, size, flags, ppData);
+ return dev_data->device_dispatch_table->MapMemory(device, mem, offset,
+ size, flags, ppData);
}
return VK_ERROR_VALIDATION_FAILED_EXT;
}
-VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory mem,
- VkDeviceSize memOffset)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->BindImageMemory(device, image, mem, memOffset);
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image,
+ VkDeviceMemory mem,
+ VkDeviceSize memOffset) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->BindImageMemory(
+ device, image, mem, memOffset);
loader_platform_thread_lock_mutex(&globalLock);
dev_data->memImageMap[mem] = image;
loader_platform_thread_unlock_mutex(&globalLock);
return result;
}
-
VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event) {
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
dev_data->eventMap[event].needsSignaled = false;
VkResult result = dev_data->device_dispatch_table->SetEvent(device, event);
return result;
}
-VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo *pBindInfo, VkFence fence) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
VkBool32 skip_call = VK_FALSE;
- for (uint32_t bindIdx=0; bindIdx < bindInfoCount; ++bindIdx) {
- const VkBindSparseInfo& bindInfo = pBindInfo[bindIdx];
- for (uint32_t i=0; i < bindInfo.waitSemaphoreCount; ++i) {
+ for (uint32_t bindIdx = 0; bindIdx < bindInfoCount; ++bindIdx) {
+ const VkBindSparseInfo &bindInfo = pBindInfo[bindIdx];
+ for (uint32_t i = 0; i < bindInfo.waitSemaphoreCount; ++i) {
if (dev_data->semaphoreSignaledMap[bindInfo.pWaitSemaphores[i]]) {
dev_data->semaphoreSignaledMap[bindInfo.pWaitSemaphores[i]] = 0;
} else {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_QUEUE_FORWARD_PROGRESS, "DS",
- "Queue %#" PRIx64 " is waiting on semaphore %#" PRIx64 " that has no way to be signaled.",
- (uint64_t)(queue), (uint64_t)(bindInfo.pWaitSemaphores[i]));
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_QUEUE_FORWARD_PROGRESS, "DS",
+ "Queue %#" PRIx64 " is waiting on semaphore %#" PRIx64
+ " that has no way to be signaled.",
+ (uint64_t)(queue), (uint64_t)(bindInfo.pWaitSemaphores[i]));
}
}
- for (uint32_t i=0; i < bindInfo.signalSemaphoreCount; ++i) {
+ for (uint32_t i = 0; i < bindInfo.signalSemaphoreCount; ++i) {
dev_data->semaphoreSignaledMap[bindInfo.pSignalSemaphores[i]] = 1;
}
}
if (VK_FALSE == skip_call)
- return dev_data->device_dispatch_table->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+ return dev_data->device_dispatch_table->QueueBindSparse(
+ queue, bindInfoCount, pBindInfo, fence);
else
return VK_ERROR_VALIDATION_FAILED_EXT;
}
-VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSemaphore *pSemaphore) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateSemaphore(
+ device, pCreateInfo, pAllocator, pSemaphore);
if (result == VK_SUCCESS) {
dev_data->semaphoreSignaledMap[*pSemaphore] = 0;
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkSwapchainKHR *pSwapchain)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSwapchainKHR(VkDevice device,
+ const VkSwapchainCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSwapchainKHR *pSwapchain) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->CreateSwapchainKHR(
+ device, pCreateInfo, pAllocator, pSwapchain);
if (VK_SUCCESS == result) {
SWAPCHAIN_NODE *swapchain_data = new SWAPCHAIN_NODE(pCreateInfo);
@@ -6216,19 +8270,20 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks *pAllocator)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
loader_platform_thread_lock_mutex(&globalLock);
- auto swapchain_data = dev_data->device_extensions.swapchainMap.find(swapchain);
+ auto swapchain_data =
+ dev_data->device_extensions.swapchainMap.find(swapchain);
if (swapchain_data != dev_data->device_extensions.swapchainMap.end()) {
if (swapchain_data->second->images.size() > 0) {
for (auto swapchain_image : swapchain_data->second->images) {
- auto image_item = dev_data->imageLayoutMap.find(swapchain_image);
+ auto image_item =
+ dev_data->imageLayoutMap.find(swapchain_image);
if (image_item != dev_data->imageLayoutMap.end())
dev_data->imageLayoutMap.erase(image_item);
}
@@ -6237,26 +8292,28 @@
dev_data->device_extensions.swapchainMap.erase(swapchain);
}
loader_platform_thread_unlock_mutex(&globalLock);
- return dev_data->device_dispatch_table->DestroySwapchainKHR(device, swapchain, pAllocator);
+ return dev_data->device_dispatch_table->DestroySwapchainKHR(
+ device, swapchain, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pCount,
- VkImage* pSwapchainImages)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->GetSwapchainImagesKHR(device, swapchain, pCount, pSwapchainImages);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
+ uint32_t *pCount, VkImage *pSwapchainImages) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->GetSwapchainImagesKHR(
+ device, swapchain, pCount, pSwapchainImages);
if (result == VK_SUCCESS && pSwapchainImages != NULL) {
// This should never happen and is checked by param checker.
- if (!pCount) return result;
+ if (!pCount)
+ return result;
for (uint32_t i = 0; i < *pCount; ++i) {
- IMAGE_NODE* image_node = new IMAGE_NODE;
+ IMAGE_NODE *image_node = new IMAGE_NODE;
image_node->layout = VK_IMAGE_LAYOUT_UNDEFINED;
loader_platform_thread_lock_mutex(&globalLock);
- auto swapchain_node = dev_data->device_extensions.swapchainMap[swapchain];
+ auto swapchain_node =
+ dev_data->device_extensions.swapchainMap[swapchain];
image_node->format = swapchain_node->createInfo.imageFormat;
swapchain_node->images.push_back(pSwapchainImages[i]);
dev_data->imageLayoutMap[pSwapchainImages[i]] = image_node;
@@ -6266,31 +8323,53 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR* pPresentInfo)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
VkBool32 skip_call = VK_FALSE;
#ifndef DISABLE_IMAGE_LAYOUT_VALIDATION
if (pPresentInfo) {
- for (uint32_t i=0; i < pPresentInfo->waitSemaphoreCount; ++i) {
- if (dev_data->semaphoreSignaledMap[pPresentInfo->pWaitSemaphores[i]]) {
- dev_data->semaphoreSignaledMap[pPresentInfo->pWaitSemaphores[i]] = 0;
+ for (uint32_t i = 0; i < pPresentInfo->waitSemaphoreCount; ++i) {
+ if (dev_data
+ ->semaphoreSignaledMap[pPresentInfo->pWaitSemaphores[i]]) {
+ dev_data
+ ->semaphoreSignaledMap[pPresentInfo->pWaitSemaphores[i]] =
+ 0;
} else {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_QUEUE_FORWARD_PROGRESS, "DS",
- "Queue %#" PRIx64 " is waiting on semaphore %#" PRIx64 " that has no way to be signaled.",
- (uint64_t)(queue), (uint64_t)(pPresentInfo->pWaitSemaphores[i]));
+                skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__,
+ DRAWSTATE_QUEUE_FORWARD_PROGRESS, "DS",
+ "Queue %#" PRIx64 " is waiting on semaphore %#" PRIx64
+ " that has no way to be signaled.",
+ (uint64_t)(queue),
+ (uint64_t)(pPresentInfo->pWaitSemaphores[i]));
}
}
for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
- auto swapchain_data = dev_data->device_extensions.swapchainMap.find(pPresentInfo->pSwapchains[i]);
- if (swapchain_data != dev_data->device_extensions.swapchainMap.end() && pPresentInfo->pImageIndices[i] < swapchain_data->second->images.size()) {
- VkImage image = swapchain_data->second->images[pPresentInfo->pImageIndices[i]];
+ auto swapchain_data = dev_data->device_extensions.swapchainMap.find(
+ pPresentInfo->pSwapchains[i]);
+ if (swapchain_data !=
+ dev_data->device_extensions.swapchainMap.end() &&
+ pPresentInfo->pImageIndices[i] <
+ swapchain_data->second->images.size()) {
+ VkImage image = swapchain_data->second
+ ->images[pPresentInfo->pImageIndices[i]];
auto image_data = dev_data->imageLayoutMap.find(image);
if (image_data != dev_data->imageLayoutMap.end()) {
- if (image_data->second->layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, (uint64_t)queue, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Images passed to present must be in layout PRESENT_SOURCE_KHR but is in %d", image_data->second->layout);
+ if (image_data->second->layout !=
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
+ skip_call |=
+ log_msg(dev_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+ (uint64_t)queue, __LINE__,
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Images passed to present must be in "
+                                    "layout PRESENT_SRC_KHR but is in %d",
+ image_data->second->layout);
}
}
}
@@ -6299,302 +8378,312 @@
#endif // DISABLE_IMAGE_LAYOUT_VALIDATION
if (VK_FALSE == skip_call)
- return dev_data->device_dispatch_table->QueuePresentKHR(queue, pPresentInfo);
+ return dev_data->device_dispatch_table->QueuePresentKHR(queue,
+ pPresentInfo);
return VK_ERROR_VALIDATION_FAILED_EXT;
}
-VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex)
-{
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = dev_data->device_dispatch_table->AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain,
+ uint64_t timeout, VkSemaphore semaphore,
+ VkFence fence, uint32_t *pImageIndex) {
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = dev_data->device_dispatch_table->AcquireNextImageKHR(
+ device, swapchain, timeout, semaphore, fence, pImageIndex);
dev_data->semaphoreSignaledMap[semaphore] = 1;
return result;
}
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pMsgCallback)
-{
- layer_data* my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDebugReportCallbackEXT *pMsgCallback) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
- VkResult res = pTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
+ VkResult res = pTable->CreateDebugReportCallbackEXT(
+ instance, pCreateInfo, pAllocator, pMsgCallback);
if (VK_SUCCESS == res) {
- res = layer_create_msg_callback(my_data->report_data, pCreateInfo, pAllocator, pMsgCallback);
+ res = layer_create_msg_callback(my_data->report_data, pCreateInfo,
+ pAllocator, pMsgCallback);
}
return res;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT msgCallback,
- const VkAllocationCallbacks* pAllocator)
-{
- layer_data* my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDebugReportCallbackEXT(VkInstance instance,
+ VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
pTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
layer_destroy_msg_callback(my_data->report_data, msgCallback, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objType,
- uint64_t object,
- size_t location,
- int32_t msgCode,
- const char* pLayerPrefix,
- const char* pMsg)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+ VkDebugReportObjectTypeEXT objType, uint64_t object,
+ size_t location, int32_t msgCode,
+ const char *pLayerPrefix, const char *pMsg) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table->DebugReportMessageEXT(
+ instance, flags, objType, object, location, msgCode, pLayerPrefix,
+ pMsg);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDbgMarkerBegin(VkCommandBuffer commandBuffer, const char* pMarker)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDbgMarkerBegin(VkCommandBuffer commandBuffer, const char *pMarker) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (!dev_data->device_extensions.debug_marker_enabled) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__, DRAWSTATE_INVALID_EXTENSION, "DS",
- "Attempt to use CmdDbgMarkerBegin but extension disabled!");
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__, DRAWSTATE_INVALID_EXTENSION,
+ "DS", "Attempt to use CmdDbgMarkerBegin but extension disabled!");
return;
} else if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_DBGMARKERBEGIN, "vkCmdDbgMarkerBegin()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_DBGMARKERBEGIN, "vkCmdDbgMarkerBegin()");
}
if (VK_FALSE == skipCall)
- debug_marker_dispatch_table(commandBuffer)->CmdDbgMarkerBegin(commandBuffer, pMarker);
+ debug_marker_dispatch_table(commandBuffer)
+ ->CmdDbgMarkerBegin(commandBuffer, pMarker);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDbgMarkerEnd(VkCommandBuffer commandBuffer)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDbgMarkerEnd(VkCommandBuffer commandBuffer) {
VkBool32 skipCall = VK_FALSE;
- layer_data* dev_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- GLOBAL_CB_NODE* pCB = getCBNode(dev_data, commandBuffer);
+ layer_data *dev_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ GLOBAL_CB_NODE *pCB = getCBNode(dev_data, commandBuffer);
if (!dev_data->device_extensions.debug_marker_enabled) {
- skipCall |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__, DRAWSTATE_INVALID_EXTENSION, "DS",
- "Attempt to use CmdDbgMarkerEnd but extension disabled!");
+ skipCall |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__, DRAWSTATE_INVALID_EXTENSION,
+ "DS", "Attempt to use CmdDbgMarkerEnd but extension disabled!");
return;
} else if (pCB) {
- skipCall |= addCmd(dev_data, pCB, CMD_DBGMARKEREND, "vkCmdDbgMarkerEnd()");
+ skipCall |=
+ addCmd(dev_data, pCB, CMD_DBGMARKEREND, "vkCmdDbgMarkerEnd()");
}
if (VK_FALSE == skipCall)
- debug_marker_dispatch_table(commandBuffer)->CmdDbgMarkerEnd(commandBuffer);
+ debug_marker_dispatch_table(commandBuffer)
+ ->CmdDbgMarkerEnd(commandBuffer);
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice dev, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkDestroyDevice"))
- return (PFN_vkVoidFunction) vkDestroyDevice;
+ return (PFN_vkVoidFunction)vkDestroyDevice;
if (!strcmp(funcName, "vkQueueSubmit"))
- return (PFN_vkVoidFunction) vkQueueSubmit;
+ return (PFN_vkVoidFunction)vkQueueSubmit;
if (!strcmp(funcName, "vkWaitForFences"))
- return (PFN_vkVoidFunction) vkWaitForFences;
+ return (PFN_vkVoidFunction)vkWaitForFences;
if (!strcmp(funcName, "vkGetFenceStatus"))
- return (PFN_vkVoidFunction) vkGetFenceStatus;
+ return (PFN_vkVoidFunction)vkGetFenceStatus;
if (!strcmp(funcName, "vkQueueWaitIdle"))
- return (PFN_vkVoidFunction) vkQueueWaitIdle;
+ return (PFN_vkVoidFunction)vkQueueWaitIdle;
if (!strcmp(funcName, "vkDeviceWaitIdle"))
- return (PFN_vkVoidFunction) vkDeviceWaitIdle;
+ return (PFN_vkVoidFunction)vkDeviceWaitIdle;
if (!strcmp(funcName, "vkGetDeviceQueue"))
- return (PFN_vkVoidFunction) vkGetDeviceQueue;
+ return (PFN_vkVoidFunction)vkGetDeviceQueue;
if (!strcmp(funcName, "vkDestroyInstance"))
- return (PFN_vkVoidFunction) vkDestroyInstance;
+ return (PFN_vkVoidFunction)vkDestroyInstance;
if (!strcmp(funcName, "vkDestroyDevice"))
- return (PFN_vkVoidFunction) vkDestroyDevice;
+ return (PFN_vkVoidFunction)vkDestroyDevice;
if (!strcmp(funcName, "vkDestroyFence"))
- return (PFN_vkVoidFunction) vkDestroyFence;
+ return (PFN_vkVoidFunction)vkDestroyFence;
if (!strcmp(funcName, "vkDestroySemaphore"))
- return (PFN_vkVoidFunction) vkDestroySemaphore;
+ return (PFN_vkVoidFunction)vkDestroySemaphore;
if (!strcmp(funcName, "vkDestroyEvent"))
- return (PFN_vkVoidFunction) vkDestroyEvent;
+ return (PFN_vkVoidFunction)vkDestroyEvent;
if (!strcmp(funcName, "vkDestroyQueryPool"))
- return (PFN_vkVoidFunction) vkDestroyQueryPool;
+ return (PFN_vkVoidFunction)vkDestroyQueryPool;
if (!strcmp(funcName, "vkDestroyBuffer"))
- return (PFN_vkVoidFunction) vkDestroyBuffer;
+ return (PFN_vkVoidFunction)vkDestroyBuffer;
if (!strcmp(funcName, "vkDestroyBufferView"))
- return (PFN_vkVoidFunction) vkDestroyBufferView;
+ return (PFN_vkVoidFunction)vkDestroyBufferView;
if (!strcmp(funcName, "vkDestroyImage"))
- return (PFN_vkVoidFunction) vkDestroyImage;
+ return (PFN_vkVoidFunction)vkDestroyImage;
if (!strcmp(funcName, "vkDestroyImageView"))
- return (PFN_vkVoidFunction) vkDestroyImageView;
+ return (PFN_vkVoidFunction)vkDestroyImageView;
if (!strcmp(funcName, "vkDestroyShaderModule"))
- return (PFN_vkVoidFunction) vkDestroyShaderModule;
+ return (PFN_vkVoidFunction)vkDestroyShaderModule;
if (!strcmp(funcName, "vkDestroyPipeline"))
- return (PFN_vkVoidFunction) vkDestroyPipeline;
+ return (PFN_vkVoidFunction)vkDestroyPipeline;
if (!strcmp(funcName, "vkDestroyPipelineLayout"))
- return (PFN_vkVoidFunction) vkDestroyPipelineLayout;
+ return (PFN_vkVoidFunction)vkDestroyPipelineLayout;
if (!strcmp(funcName, "vkDestroySampler"))
- return (PFN_vkVoidFunction) vkDestroySampler;
+ return (PFN_vkVoidFunction)vkDestroySampler;
if (!strcmp(funcName, "vkDestroyDescriptorSetLayout"))
- return (PFN_vkVoidFunction) vkDestroyDescriptorSetLayout;
+ return (PFN_vkVoidFunction)vkDestroyDescriptorSetLayout;
if (!strcmp(funcName, "vkDestroyDescriptorPool"))
- return (PFN_vkVoidFunction) vkDestroyDescriptorPool;
+ return (PFN_vkVoidFunction)vkDestroyDescriptorPool;
if (!strcmp(funcName, "vkDestroyFramebuffer"))
- return (PFN_vkVoidFunction) vkDestroyFramebuffer;
+ return (PFN_vkVoidFunction)vkDestroyFramebuffer;
if (!strcmp(funcName, "vkDestroyRenderPass"))
- return (PFN_vkVoidFunction) vkDestroyRenderPass;
+ return (PFN_vkVoidFunction)vkDestroyRenderPass;
if (!strcmp(funcName, "vkCreateBuffer"))
- return (PFN_vkVoidFunction) vkCreateBuffer;
+ return (PFN_vkVoidFunction)vkCreateBuffer;
if (!strcmp(funcName, "vkCreateBufferView"))
- return (PFN_vkVoidFunction) vkCreateBufferView;
+ return (PFN_vkVoidFunction)vkCreateBufferView;
if (!strcmp(funcName, "vkCreateImage"))
- return (PFN_vkVoidFunction) vkCreateImage;
+ return (PFN_vkVoidFunction)vkCreateImage;
if (!strcmp(funcName, "vkCreateImageView"))
- return (PFN_vkVoidFunction) vkCreateImageView;
+ return (PFN_vkVoidFunction)vkCreateImageView;
if (!strcmp(funcName, "CreatePipelineCache"))
- return (PFN_vkVoidFunction) vkCreatePipelineCache;
+ return (PFN_vkVoidFunction)vkCreatePipelineCache;
if (!strcmp(funcName, "DestroyPipelineCache"))
- return (PFN_vkVoidFunction) vkDestroyPipelineCache;
+ return (PFN_vkVoidFunction)vkDestroyPipelineCache;
if (!strcmp(funcName, "GetPipelineCacheData"))
- return (PFN_vkVoidFunction) vkGetPipelineCacheData;
+ return (PFN_vkVoidFunction)vkGetPipelineCacheData;
if (!strcmp(funcName, "MergePipelineCaches"))
- return (PFN_vkVoidFunction) vkMergePipelineCaches;
+ return (PFN_vkVoidFunction)vkMergePipelineCaches;
if (!strcmp(funcName, "vkCreateGraphicsPipelines"))
- return (PFN_vkVoidFunction) vkCreateGraphicsPipelines;
+ return (PFN_vkVoidFunction)vkCreateGraphicsPipelines;
if (!strcmp(funcName, "vkCreateComputePipelines"))
- return (PFN_vkVoidFunction) vkCreateComputePipelines;
+ return (PFN_vkVoidFunction)vkCreateComputePipelines;
if (!strcmp(funcName, "vkCreateSampler"))
- return (PFN_vkVoidFunction) vkCreateSampler;
+ return (PFN_vkVoidFunction)vkCreateSampler;
if (!strcmp(funcName, "vkCreateDescriptorSetLayout"))
- return (PFN_vkVoidFunction) vkCreateDescriptorSetLayout;
+ return (PFN_vkVoidFunction)vkCreateDescriptorSetLayout;
if (!strcmp(funcName, "vkCreatePipelineLayout"))
- return (PFN_vkVoidFunction) vkCreatePipelineLayout;
+ return (PFN_vkVoidFunction)vkCreatePipelineLayout;
if (!strcmp(funcName, "vkCreateDescriptorPool"))
- return (PFN_vkVoidFunction) vkCreateDescriptorPool;
+ return (PFN_vkVoidFunction)vkCreateDescriptorPool;
if (!strcmp(funcName, "vkResetDescriptorPool"))
- return (PFN_vkVoidFunction) vkResetDescriptorPool;
+ return (PFN_vkVoidFunction)vkResetDescriptorPool;
if (!strcmp(funcName, "vkAllocateDescriptorSets"))
- return (PFN_vkVoidFunction) vkAllocateDescriptorSets;
+ return (PFN_vkVoidFunction)vkAllocateDescriptorSets;
if (!strcmp(funcName, "vkFreeDescriptorSets"))
- return (PFN_vkVoidFunction) vkFreeDescriptorSets;
+ return (PFN_vkVoidFunction)vkFreeDescriptorSets;
if (!strcmp(funcName, "vkUpdateDescriptorSets"))
- return (PFN_vkVoidFunction) vkUpdateDescriptorSets;
+ return (PFN_vkVoidFunction)vkUpdateDescriptorSets;
if (!strcmp(funcName, "vkCreateCommandPool"))
- return (PFN_vkVoidFunction) vkCreateCommandPool;
+ return (PFN_vkVoidFunction)vkCreateCommandPool;
if (!strcmp(funcName, "vkDestroyCommandPool"))
- return (PFN_vkVoidFunction) vkDestroyCommandPool;
+ return (PFN_vkVoidFunction)vkDestroyCommandPool;
if (!strcmp(funcName, "vkResetCommandPool"))
- return (PFN_vkVoidFunction) vkResetCommandPool;
+ return (PFN_vkVoidFunction)vkResetCommandPool;
if (!strcmp(funcName, "vkAllocateCommandBuffers"))
- return (PFN_vkVoidFunction) vkAllocateCommandBuffers;
+ return (PFN_vkVoidFunction)vkAllocateCommandBuffers;
if (!strcmp(funcName, "vkFreeCommandBuffers"))
- return (PFN_vkVoidFunction) vkFreeCommandBuffers;
+ return (PFN_vkVoidFunction)vkFreeCommandBuffers;
if (!strcmp(funcName, "vkBeginCommandBuffer"))
- return (PFN_vkVoidFunction) vkBeginCommandBuffer;
+ return (PFN_vkVoidFunction)vkBeginCommandBuffer;
if (!strcmp(funcName, "vkEndCommandBuffer"))
- return (PFN_vkVoidFunction) vkEndCommandBuffer;
+ return (PFN_vkVoidFunction)vkEndCommandBuffer;
if (!strcmp(funcName, "vkResetCommandBuffer"))
- return (PFN_vkVoidFunction) vkResetCommandBuffer;
+ return (PFN_vkVoidFunction)vkResetCommandBuffer;
if (!strcmp(funcName, "vkCmdBindPipeline"))
- return (PFN_vkVoidFunction) vkCmdBindPipeline;
+ return (PFN_vkVoidFunction)vkCmdBindPipeline;
if (!strcmp(funcName, "vkCmdSetViewport"))
- return (PFN_vkVoidFunction) vkCmdSetViewport;
+ return (PFN_vkVoidFunction)vkCmdSetViewport;
if (!strcmp(funcName, "vkCmdSetScissor"))
- return (PFN_vkVoidFunction) vkCmdSetScissor;
+ return (PFN_vkVoidFunction)vkCmdSetScissor;
if (!strcmp(funcName, "vkCmdSetLineWidth"))
- return (PFN_vkVoidFunction) vkCmdSetLineWidth;
+ return (PFN_vkVoidFunction)vkCmdSetLineWidth;
if (!strcmp(funcName, "vkCmdSetDepthBias"))
- return (PFN_vkVoidFunction) vkCmdSetDepthBias;
+ return (PFN_vkVoidFunction)vkCmdSetDepthBias;
if (!strcmp(funcName, "vkCmdSetBlendConstants"))
- return (PFN_vkVoidFunction) vkCmdSetBlendConstants;
+ return (PFN_vkVoidFunction)vkCmdSetBlendConstants;
if (!strcmp(funcName, "vkCmdSetDepthBounds"))
- return (PFN_vkVoidFunction) vkCmdSetDepthBounds;
+ return (PFN_vkVoidFunction)vkCmdSetDepthBounds;
if (!strcmp(funcName, "vkCmdSetStencilCompareMask"))
- return (PFN_vkVoidFunction) vkCmdSetStencilCompareMask;
+ return (PFN_vkVoidFunction)vkCmdSetStencilCompareMask;
if (!strcmp(funcName, "vkCmdSetStencilWriteMask"))
- return (PFN_vkVoidFunction) vkCmdSetStencilWriteMask;
+ return (PFN_vkVoidFunction)vkCmdSetStencilWriteMask;
if (!strcmp(funcName, "vkCmdSetStencilReference"))
- return (PFN_vkVoidFunction) vkCmdSetStencilReference;
+ return (PFN_vkVoidFunction)vkCmdSetStencilReference;
if (!strcmp(funcName, "vkCmdBindDescriptorSets"))
- return (PFN_vkVoidFunction) vkCmdBindDescriptorSets;
+ return (PFN_vkVoidFunction)vkCmdBindDescriptorSets;
if (!strcmp(funcName, "vkCmdBindVertexBuffers"))
- return (PFN_vkVoidFunction) vkCmdBindVertexBuffers;
+ return (PFN_vkVoidFunction)vkCmdBindVertexBuffers;
if (!strcmp(funcName, "vkCmdBindIndexBuffer"))
- return (PFN_vkVoidFunction) vkCmdBindIndexBuffer;
+ return (PFN_vkVoidFunction)vkCmdBindIndexBuffer;
if (!strcmp(funcName, "vkCmdDraw"))
- return (PFN_vkVoidFunction) vkCmdDraw;
+ return (PFN_vkVoidFunction)vkCmdDraw;
if (!strcmp(funcName, "vkCmdDrawIndexed"))
- return (PFN_vkVoidFunction) vkCmdDrawIndexed;
+ return (PFN_vkVoidFunction)vkCmdDrawIndexed;
if (!strcmp(funcName, "vkCmdDrawIndirect"))
- return (PFN_vkVoidFunction) vkCmdDrawIndirect;
+ return (PFN_vkVoidFunction)vkCmdDrawIndirect;
if (!strcmp(funcName, "vkCmdDrawIndexedIndirect"))
- return (PFN_vkVoidFunction) vkCmdDrawIndexedIndirect;
+ return (PFN_vkVoidFunction)vkCmdDrawIndexedIndirect;
if (!strcmp(funcName, "vkCmdDispatch"))
- return (PFN_vkVoidFunction) vkCmdDispatch;
+ return (PFN_vkVoidFunction)vkCmdDispatch;
if (!strcmp(funcName, "vkCmdDispatchIndirect"))
- return (PFN_vkVoidFunction) vkCmdDispatchIndirect;
+ return (PFN_vkVoidFunction)vkCmdDispatchIndirect;
if (!strcmp(funcName, "vkCmdCopyBuffer"))
- return (PFN_vkVoidFunction) vkCmdCopyBuffer;
+ return (PFN_vkVoidFunction)vkCmdCopyBuffer;
if (!strcmp(funcName, "vkCmdCopyImage"))
- return (PFN_vkVoidFunction) vkCmdCopyImage;
+ return (PFN_vkVoidFunction)vkCmdCopyImage;
if (!strcmp(funcName, "vkCmdCopyBufferToImage"))
- return (PFN_vkVoidFunction) vkCmdCopyBufferToImage;
+ return (PFN_vkVoidFunction)vkCmdCopyBufferToImage;
if (!strcmp(funcName, "vkCmdCopyImageToBuffer"))
- return (PFN_vkVoidFunction) vkCmdCopyImageToBuffer;
+ return (PFN_vkVoidFunction)vkCmdCopyImageToBuffer;
if (!strcmp(funcName, "vkCmdUpdateBuffer"))
- return (PFN_vkVoidFunction) vkCmdUpdateBuffer;
+ return (PFN_vkVoidFunction)vkCmdUpdateBuffer;
if (!strcmp(funcName, "vkCmdFillBuffer"))
- return (PFN_vkVoidFunction) vkCmdFillBuffer;
+ return (PFN_vkVoidFunction)vkCmdFillBuffer;
if (!strcmp(funcName, "vkCmdClearColorImage"))
- return (PFN_vkVoidFunction) vkCmdClearColorImage;
+ return (PFN_vkVoidFunction)vkCmdClearColorImage;
if (!strcmp(funcName, "vkCmdClearDepthStencilImage"))
- return (PFN_vkVoidFunction) vkCmdClearDepthStencilImage;
+ return (PFN_vkVoidFunction)vkCmdClearDepthStencilImage;
if (!strcmp(funcName, "vkCmdClearAttachments"))
- return (PFN_vkVoidFunction) vkCmdClearAttachments;
+ return (PFN_vkVoidFunction)vkCmdClearAttachments;
if (!strcmp(funcName, "vkCmdResolveImage"))
- return (PFN_vkVoidFunction) vkCmdResolveImage;
+ return (PFN_vkVoidFunction)vkCmdResolveImage;
if (!strcmp(funcName, "vkCmdSetEvent"))
- return (PFN_vkVoidFunction) vkCmdSetEvent;
+ return (PFN_vkVoidFunction)vkCmdSetEvent;
if (!strcmp(funcName, "vkCmdResetEvent"))
- return (PFN_vkVoidFunction) vkCmdResetEvent;
+ return (PFN_vkVoidFunction)vkCmdResetEvent;
if (!strcmp(funcName, "vkCmdWaitEvents"))
- return (PFN_vkVoidFunction) vkCmdWaitEvents;
+ return (PFN_vkVoidFunction)vkCmdWaitEvents;
if (!strcmp(funcName, "vkCmdPipelineBarrier"))
- return (PFN_vkVoidFunction) vkCmdPipelineBarrier;
+ return (PFN_vkVoidFunction)vkCmdPipelineBarrier;
if (!strcmp(funcName, "vkCmdBeginQuery"))
- return (PFN_vkVoidFunction) vkCmdBeginQuery;
+ return (PFN_vkVoidFunction)vkCmdBeginQuery;
if (!strcmp(funcName, "vkCmdEndQuery"))
- return (PFN_vkVoidFunction) vkCmdEndQuery;
+ return (PFN_vkVoidFunction)vkCmdEndQuery;
if (!strcmp(funcName, "vkCmdResetQueryPool"))
- return (PFN_vkVoidFunction) vkCmdResetQueryPool;
+ return (PFN_vkVoidFunction)vkCmdResetQueryPool;
if (!strcmp(funcName, "vkCmdWriteTimestamp"))
- return (PFN_vkVoidFunction) vkCmdWriteTimestamp;
+ return (PFN_vkVoidFunction)vkCmdWriteTimestamp;
if (!strcmp(funcName, "vkCreateFramebuffer"))
- return (PFN_vkVoidFunction) vkCreateFramebuffer;
+ return (PFN_vkVoidFunction)vkCreateFramebuffer;
if (!strcmp(funcName, "vkCreateShaderModule"))
- return (PFN_vkVoidFunction) vkCreateShaderModule;
+ return (PFN_vkVoidFunction)vkCreateShaderModule;
if (!strcmp(funcName, "vkCreateRenderPass"))
- return (PFN_vkVoidFunction) vkCreateRenderPass;
+ return (PFN_vkVoidFunction)vkCreateRenderPass;
if (!strcmp(funcName, "vkCmdBeginRenderPass"))
- return (PFN_vkVoidFunction) vkCmdBeginRenderPass;
+ return (PFN_vkVoidFunction)vkCmdBeginRenderPass;
if (!strcmp(funcName, "vkCmdNextSubpass"))
- return (PFN_vkVoidFunction) vkCmdNextSubpass;
+ return (PFN_vkVoidFunction)vkCmdNextSubpass;
if (!strcmp(funcName, "vkCmdEndRenderPass"))
- return (PFN_vkVoidFunction) vkCmdEndRenderPass;
+ return (PFN_vkVoidFunction)vkCmdEndRenderPass;
if (!strcmp(funcName, "vkCmdExecuteCommands"))
- return (PFN_vkVoidFunction) vkCmdExecuteCommands;
+ return (PFN_vkVoidFunction)vkCmdExecuteCommands;
if (!strcmp(funcName, "vkSetEvent"))
- return (PFN_vkVoidFunction) vkSetEvent;
+ return (PFN_vkVoidFunction)vkSetEvent;
if (!strcmp(funcName, "vkMapMemory"))
- return (PFN_vkVoidFunction) vkMapMemory;
+ return (PFN_vkVoidFunction)vkMapMemory;
if (!strcmp(funcName, "vkGetQueryPoolResults"))
- return (PFN_vkVoidFunction) vkGetQueryPoolResults;
+ return (PFN_vkVoidFunction)vkGetQueryPoolResults;
if (!strcmp(funcName, "vkBindImageMemory"))
- return (PFN_vkVoidFunction) vkBindImageMemory;
+ return (PFN_vkVoidFunction)vkBindImageMemory;
if (!strcmp(funcName, "vkQueueBindSparse"))
- return (PFN_vkVoidFunction) vkQueueBindSparse;
+ return (PFN_vkVoidFunction)vkQueueBindSparse;
if (!strcmp(funcName, "vkCreateSemaphore"))
- return (PFN_vkVoidFunction) vkCreateSemaphore;
+ return (PFN_vkVoidFunction)vkCreateSemaphore;
if (dev == NULL)
return NULL;
@@ -6602,27 +8691,25 @@
layer_data *dev_data;
dev_data = get_my_data_ptr(get_dispatch_key(dev), layer_data_map);
- if (dev_data->device_extensions.wsi_enabled)
- {
+ if (dev_data->device_extensions.wsi_enabled) {
if (!strcmp(funcName, "vkCreateSwapchainKHR"))
- return (PFN_vkVoidFunction) vkCreateSwapchainKHR;
+ return (PFN_vkVoidFunction)vkCreateSwapchainKHR;
if (!strcmp(funcName, "vkDestroySwapchainKHR"))
- return (PFN_vkVoidFunction) vkDestroySwapchainKHR;
+ return (PFN_vkVoidFunction)vkDestroySwapchainKHR;
if (!strcmp(funcName, "vkGetSwapchainImagesKHR"))
- return (PFN_vkVoidFunction) vkGetSwapchainImagesKHR;
+ return (PFN_vkVoidFunction)vkGetSwapchainImagesKHR;
if (!strcmp(funcName, "vkAcquireNextImageKHR"))
- return (PFN_vkVoidFunction) vkAcquireNextImageKHR;
+ return (PFN_vkVoidFunction)vkAcquireNextImageKHR;
if (!strcmp(funcName, "vkQueuePresentKHR"))
- return (PFN_vkVoidFunction) vkQueuePresentKHR;
+ return (PFN_vkVoidFunction)vkQueuePresentKHR;
}
- VkLayerDispatchTable* pTable = dev_data->device_dispatch_table;
- if (dev_data->device_extensions.debug_marker_enabled)
- {
+ VkLayerDispatchTable *pTable = dev_data->device_dispatch_table;
+ if (dev_data->device_extensions.debug_marker_enabled) {
if (!strcmp(funcName, "vkCmdDbgMarkerBegin"))
- return (PFN_vkVoidFunction) vkCmdDbgMarkerBegin;
+ return (PFN_vkVoidFunction)vkCmdDbgMarkerBegin;
if (!strcmp(funcName, "vkCmdDbgMarkerEnd"))
- return (PFN_vkVoidFunction) vkCmdDbgMarkerEnd;
+ return (PFN_vkVoidFunction)vkCmdDbgMarkerEnd;
}
{
if (pTable->GetDeviceProcAddr == NULL)
@@ -6631,39 +8718,39 @@
}
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
if (!strcmp(funcName, "vkGetInstanceProcAddr"))
- return (PFN_vkVoidFunction) vkGetInstanceProcAddr;
+ return (PFN_vkVoidFunction)vkGetInstanceProcAddr;
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkCreateInstance"))
- return (PFN_vkVoidFunction) vkCreateInstance;
+ return (PFN_vkVoidFunction)vkCreateInstance;
if (!strcmp(funcName, "vkCreateDevice"))
- return (PFN_vkVoidFunction) vkCreateDevice;
+ return (PFN_vkVoidFunction)vkCreateDevice;
if (!strcmp(funcName, "vkDestroyInstance"))
- return (PFN_vkVoidFunction) vkDestroyInstance;
+ return (PFN_vkVoidFunction)vkDestroyInstance;
if (!strcmp(funcName, "vkEnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceLayerProperties;
if (!strcmp(funcName, "vkEnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceExtensionProperties;
if (!strcmp(funcName, "vkEnumerateDeviceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceLayerProperties;
if (!strcmp(funcName, "vkEnumerateDeviceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceExtensionProperties;
if (instance == NULL)
return NULL;
PFN_vkVoidFunction fptr;
- layer_data* my_data;
+ layer_data *my_data;
my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
fptr = debug_report_get_instance_proc_addr(my_data->report_data, funcName);
if (fptr)
return fptr;
- VkLayerInstanceDispatchTable* pTable = my_data->instance_dispatch_table;
+ VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
if (pTable->GetInstanceProcAddr == NULL)
return NULL;
return pTable->GetInstanceProcAddr(instance, funcName);
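
The hunks above only re-wrap the layer's GetDeviceProcAddr / GetInstanceProcAddr interception chain; the long strcmp ladder is unchanged in substance. As a point of reference only, a minimal sketch of the same name-to-function dispatch, written as a table lookup instead of chained strcmp calls, might look like the following (kDeviceProcs, StubCmdDraw and StubCmdDispatch are illustrative placeholders, not symbols from this patch):

// Hypothetical sketch only -- not part of this patch. Maps entry-point
// names to function pointers the way the strcmp chain above does.
#include <string>
#include <unordered_map>

typedef void (*PFN_vkVoidFunction)(void);

static void StubCmdDraw() {}     // stands in for the layer's vkCmdDraw hook
static void StubCmdDispatch() {} // stands in for the layer's vkCmdDispatch hook

static PFN_vkVoidFunction LookupDeviceProc(const char *funcName) {
    // One entry per intercepted entry point; anything not found here would
    // still have to fall through to the next layer's GetDeviceProcAddr,
    // exactly as the reformatted code above does via pTable.
    static const std::unordered_map<std::string, PFN_vkVoidFunction> kDeviceProcs = {
        {"vkCmdDraw", (PFN_vkVoidFunction)StubCmdDraw},
        {"vkCmdDispatch", (PFN_vkVoidFunction)StubCmdDispatch},
    };
    auto it = kDeviceProcs.find(funcName);
    return (it == kDeviceProcs.end()) ? nullptr : it->second;
}
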
diff --git a/layers/draw_state.h b/layers/draw_state.h
old mode 100755
new mode 100644
index 063905f..171ff4f
--- a/layers/draw_state.h
+++ b/layers/draw_state.h
@@ -41,167 +41,241 @@
using std::vector;
// Draw State ERROR codes
-typedef enum _DRAW_STATE_ERROR
-{
- DRAWSTATE_NONE, // Used for INFO & other non-error messages
- DRAWSTATE_INTERNAL_ERROR, // Error with DrawState internal data structures
- DRAWSTATE_NO_PIPELINE_BOUND, // Unable to identify a bound pipeline
- DRAWSTATE_INVALID_POOL, // Invalid DS pool
- DRAWSTATE_INVALID_SET, // Invalid DS
- DRAWSTATE_INVALID_LAYOUT, // Invalid DS layout
- DRAWSTATE_INVALID_IMAGE_LAYOUT, // Invalid Image layout
- DRAWSTATE_INVALID_PIPELINE, // Invalid Pipeline handle referenced
- DRAWSTATE_INVALID_PIPELINE_LAYOUT, // Invalid PipelineLayout
- DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, // Attempt to create a pipeline with invalid state
- DRAWSTATE_INVALID_COMMAND_BUFFER, // Invalid CommandBuffer referenced
- DRAWSTATE_INVALID_BARRIER, // Invalid Barrier
- DRAWSTATE_INVALID_BUFFER, // Invalid Buffer
- DRAWSTATE_INVALID_QUERY, // Invalid Query
- DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS, // binding in vkCmdBindVertexData() too large for PSO's pVertexBindingDescriptions array
- DRAWSTATE_VTX_INDEX_ALIGNMENT_ERROR, // binding offset in vkCmdBindIndexBuffer() out of alignment based on indexType
- //DRAWSTATE_MISSING_DOT_PROGRAM, // No "dot" program in order to generate png image
- DRAWSTATE_OUT_OF_MEMORY, // malloc failed
- DRAWSTATE_INVALID_DESCRIPTOR_SET, // Descriptor Set handle is unknown
- DRAWSTATE_DESCRIPTOR_TYPE_MISMATCH, // Type in layout vs. update are not the same
- DRAWSTATE_DESCRIPTOR_STAGEFLAGS_MISMATCH, // StageFlags in layout are not the same throughout a single VkWriteDescriptorSet update
- DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, // Descriptors set for update out of bounds for corresponding layout section
- DRAWSTATE_DESCRIPTOR_POOL_EMPTY, // Attempt to allocate descriptor from a pool with no more descriptors of that type available
- DRAWSTATE_CANT_FREE_FROM_NON_FREE_POOL, // Invalid to call vkFreeDescriptorSets on Sets allocated from a NON_FREE Pool
- DRAWSTATE_INVALID_UPDATE_INDEX, // Index of requested update is invalid for specified descriptors set
- DRAWSTATE_INVALID_UPDATE_STRUCT, // Struct in DS Update tree is of invalid type
- DRAWSTATE_NUM_SAMPLES_MISMATCH, // Number of samples in bound PSO does not match number in FB of current RenderPass
- DRAWSTATE_NO_END_COMMAND_BUFFER, // Must call vkEndCommandBuffer() before QueueSubmit on that commandBuffer
- DRAWSTATE_NO_BEGIN_COMMAND_BUFFER, // Binding cmds or calling End on CB that never had vkBeginCommandBuffer() called on it
- DRAWSTATE_COMMAND_BUFFER_SINGLE_SUBMIT_VIOLATION, // Cmd Buffer created with VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT flag is submitted multiple times
- DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, // vkCmdExecuteCommands() called with a primary commandBuffer in pCommandBuffers array
- DRAWSTATE_VIEWPORT_NOT_BOUND, // Draw submitted with no viewport state bound
- DRAWSTATE_SCISSOR_NOT_BOUND, // Draw submitted with no scissor state bound
- DRAWSTATE_LINE_WIDTH_NOT_BOUND, // Draw submitted with no line width state bound
- DRAWSTATE_DEPTH_BIAS_NOT_BOUND, // Draw submitted with no depth bias state bound
- DRAWSTATE_BLEND_NOT_BOUND, // Draw submitted with no blend state bound when color write enabled
- DRAWSTATE_DEPTH_BOUNDS_NOT_BOUND, // Draw submitted with no depth bounds state bound when depth enabled
- DRAWSTATE_STENCIL_NOT_BOUND, // Draw submitted with no stencil state bound when stencil enabled
- DRAWSTATE_INDEX_BUFFER_NOT_BOUND, // Draw submitted with no depth-stencil state bound when depth write enabled
- DRAWSTATE_PIPELINE_LAYOUTS_INCOMPATIBLE, // Draw submitted PSO Pipeline layout that's not compatible with layout from BindDescriptorSets
- DRAWSTATE_RENDERPASS_INCOMPATIBLE, // Incompatible renderpasses between secondary cmdBuffer and primary cmdBuffer or framebuffer
- DRAWSTATE_FRAMEBUFFER_INCOMPATIBLE, // Incompatible framebuffer between secondary cmdBuffer and active renderPass
- DRAWSTATE_INVALID_RENDERPASS, // Use of a NULL or otherwise invalid RenderPass object
- DRAWSTATE_INVALID_RENDERPASS_CMD, // Invalid cmd submitted while a RenderPass is active
- DRAWSTATE_NO_ACTIVE_RENDERPASS, // Rendering cmd submitted without an active RenderPass
- DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, // DescriptorSet bound but it was never updated. This is a warning code.
- DRAWSTATE_DESCRIPTOR_SET_NOT_BOUND, // DescriptorSet used by pipeline at draw time is not bound, or has been disturbed (which would have flagged previous warning)
- DRAWSTATE_INVALID_DYNAMIC_OFFSET_COUNT, // DescriptorSets bound with different number of dynamic descriptors that were included in dynamicOffsetCount
- DRAWSTATE_CLEAR_CMD_BEFORE_DRAW, // Clear cmd issued before any Draw in CommandBuffer, should use RenderPass Ops instead
- DRAWSTATE_BEGIN_CB_INVALID_STATE, // CB state at Begin call is bad. Can be Primary/Secondary CB created with mismatched FB/RP information or CB in RECORDING state
- DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, // CmdBuffer is being used in violation of VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT rules (i.e. simultaneous use w/o that bit set)
- DRAWSTATE_INVALID_COMMAND_BUFFER_RESET, // Attempting to call Reset (or Begin on recorded cmdBuffer) that was allocated from Pool w/o VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT bit set
- DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, // Count for viewports and scissors mismatch and/or state doesn't match count
- DRAWSTATE_INVALID_IMAGE_ASPECT, // Image aspect is invalid for the current operation
- DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, // Attachment reference must be present in active subpass
+typedef enum _DRAW_STATE_ERROR {
+ DRAWSTATE_NONE, // Used for INFO & other non-error messages
+ DRAWSTATE_INTERNAL_ERROR, // Error with DrawState internal data structures
+ DRAWSTATE_NO_PIPELINE_BOUND, // Unable to identify a bound pipeline
+ DRAWSTATE_INVALID_POOL, // Invalid DS pool
+ DRAWSTATE_INVALID_SET, // Invalid DS
+ DRAWSTATE_INVALID_LAYOUT, // Invalid DS layout
+ DRAWSTATE_INVALID_IMAGE_LAYOUT, // Invalid Image layout
+ DRAWSTATE_INVALID_PIPELINE, // Invalid Pipeline handle referenced
+ DRAWSTATE_INVALID_PIPELINE_LAYOUT, // Invalid PipelineLayout
+ DRAWSTATE_INVALID_PIPELINE_CREATE_STATE, // Attempt to create a pipeline
+ // with invalid state
+ DRAWSTATE_INVALID_COMMAND_BUFFER, // Invalid CommandBuffer referenced
+ DRAWSTATE_INVALID_BARRIER, // Invalid Barrier
+ DRAWSTATE_INVALID_BUFFER, // Invalid Buffer
+ DRAWSTATE_INVALID_QUERY, // Invalid Query
+ DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS, // binding in vkCmdBindVertexData() too
+ // large for PSO's
+ // pVertexBindingDescriptions array
+ DRAWSTATE_VTX_INDEX_ALIGNMENT_ERROR, // binding offset in
+ // vkCmdBindIndexBuffer() out of
+ // alignment based on indexType
+ // DRAWSTATE_MISSING_DOT_PROGRAM, // No "dot" program in order
+ // to generate png image
+ DRAWSTATE_OUT_OF_MEMORY, // malloc failed
+ DRAWSTATE_INVALID_DESCRIPTOR_SET, // Descriptor Set handle is unknown
+ DRAWSTATE_DESCRIPTOR_TYPE_MISMATCH, // Type in layout vs. update are not the
+ // same
+ DRAWSTATE_DESCRIPTOR_STAGEFLAGS_MISMATCH, // StageFlags in layout are not
+ // the same throughout a single
+ // VkWriteDescriptorSet update
+ DRAWSTATE_DESCRIPTOR_UPDATE_OUT_OF_BOUNDS, // Descriptors set for update out
+ // of bounds for corresponding
+ // layout section
+ DRAWSTATE_DESCRIPTOR_POOL_EMPTY, // Attempt to allocate descriptor from a
+ // pool with no more descriptors of that
+ // type available
+ DRAWSTATE_CANT_FREE_FROM_NON_FREE_POOL, // Invalid to call
+ // vkFreeDescriptorSets on Sets
+ // allocated from a NON_FREE Pool
+ DRAWSTATE_INVALID_UPDATE_INDEX, // Index of requested update is invalid for
+ // specified descriptors set
+ DRAWSTATE_INVALID_UPDATE_STRUCT, // Struct in DS Update tree is of invalid
+ // type
+ DRAWSTATE_NUM_SAMPLES_MISMATCH, // Number of samples in bound PSO does not
+ // match number in FB of current RenderPass
+ DRAWSTATE_NO_END_COMMAND_BUFFER, // Must call vkEndCommandBuffer() before
+ // QueueSubmit on that commandBuffer
+ DRAWSTATE_NO_BEGIN_COMMAND_BUFFER, // Binding cmds or calling End on CB that
+ // never had vkBeginCommandBuffer()
+ // called on it
+ DRAWSTATE_COMMAND_BUFFER_SINGLE_SUBMIT_VIOLATION, // Cmd Buffer created with
+ // VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT
+ // flag is submitted
+ // multiple times
+ DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, // vkCmdExecuteCommands() called
+ // with a primary commandBuffer
+ // in pCommandBuffers array
+ DRAWSTATE_VIEWPORT_NOT_BOUND, // Draw submitted with no viewport state bound
+ DRAWSTATE_SCISSOR_NOT_BOUND, // Draw submitted with no scissor state bound
+ DRAWSTATE_LINE_WIDTH_NOT_BOUND, // Draw submitted with no line width state
+ // bound
+ DRAWSTATE_DEPTH_BIAS_NOT_BOUND, // Draw submitted with no depth bias state
+ // bound
+ DRAWSTATE_BLEND_NOT_BOUND, // Draw submitted with no blend state bound when
+ // color write enabled
+ DRAWSTATE_DEPTH_BOUNDS_NOT_BOUND, // Draw submitted with no depth bounds
+ // state bound when depth enabled
+ DRAWSTATE_STENCIL_NOT_BOUND, // Draw submitted with no stencil state bound
+ // when stencil enabled
+ DRAWSTATE_INDEX_BUFFER_NOT_BOUND, // Draw submitted with no depth-stencil
+ // state bound when depth write enabled
+ DRAWSTATE_PIPELINE_LAYOUTS_INCOMPATIBLE, // Draw submitted PSO Pipeline
+ // layout that's not compatible
+ // with layout from
+ // BindDescriptorSets
+ DRAWSTATE_RENDERPASS_INCOMPATIBLE, // Incompatible renderpasses between
+ // secondary cmdBuffer and primary
+ // cmdBuffer or framebuffer
+ DRAWSTATE_FRAMEBUFFER_INCOMPATIBLE, // Incompatible framebuffer between
+ // secondary cmdBuffer and active
+ // renderPass
+ DRAWSTATE_INVALID_RENDERPASS, // Use of a NULL or otherwise invalid
+ // RenderPass object
+ DRAWSTATE_INVALID_RENDERPASS_CMD, // Invalid cmd submitted while a
+ // RenderPass is active
+ DRAWSTATE_NO_ACTIVE_RENDERPASS, // Rendering cmd submitted without an active
+ // RenderPass
+ DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, // DescriptorSet bound but it was
+ // never updated. This is a warning
+ // code.
+ DRAWSTATE_DESCRIPTOR_SET_NOT_BOUND, // DescriptorSet used by pipeline at
+ // draw time is not bound, or has been
+ // disturbed (which would have flagged
+ // previous warning)
+ DRAWSTATE_INVALID_DYNAMIC_OFFSET_COUNT, // DescriptorSets bound with
+ // different number of dynamic
+ // descriptors that were included in
+ // dynamicOffsetCount
+ DRAWSTATE_CLEAR_CMD_BEFORE_DRAW, // Clear cmd issued before any Draw in
+ // CommandBuffer, should use RenderPass Ops
+ // instead
+ DRAWSTATE_BEGIN_CB_INVALID_STATE, // CB state at Begin call is bad. Can be
+ // Primary/Secondary CB created with
+ // mismatched FB/RP information or CB in
+ // RECORDING state
+ DRAWSTATE_INVALID_CB_SIMULTANEOUS_USE, // CmdBuffer is being used in
+ // violation of
+ // VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+ // rules (i.e. simultaneous use w/o
+ // that bit set)
+ DRAWSTATE_INVALID_COMMAND_BUFFER_RESET, // Attempting to call Reset (or
+ // Begin on recorded cmdBuffer) that
+ // was allocated from Pool w/o
+ // VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
+ // bit set
+ DRAWSTATE_VIEWPORT_SCISSOR_MISMATCH, // Count for viewports and scissors
+ // mismatch and/or state doesn't match
+ // count
+ DRAWSTATE_INVALID_IMAGE_ASPECT, // Image aspect is invalid for the current
+ // operation
+ DRAWSTATE_MISSING_ATTACHMENT_REFERENCE, // Attachment reference must be
+ // present in active subpass
DRAWSTATE_INVALID_EXTENSION,
- DRAWSTATE_SAMPLER_DESCRIPTOR_ERROR, // A Descriptor of *_SAMPLER type is being updated with an invalid or bad Sampler
- DRAWSTATE_INCONSISTENT_IMMUTABLE_SAMPLER_UPDATE, // Descriptors of *COMBINED_IMAGE_SAMPLER type are being updated where some, but not all, of the updates use immutable samplers
- DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, // A Descriptor of *_IMAGE or *_ATTACHMENT type is being updated with an invalid or bad ImageView
- DRAWSTATE_BUFFERVIEW_DESCRIPTOR_ERROR, // A Descriptor of *_TEXEL_BUFFER type is being updated with an invalid or bad BufferView
- DRAWSTATE_BUFFERINFO_DESCRIPTOR_ERROR, // A Descriptor of *_[UNIFORM|STORAGE]_BUFFER_[DYNAMIC] type is being updated with an invalid or bad BufferView
- DRAWSTATE_DYNAMIC_OFFSET_OVERFLOW, // At draw time the dynamic offset combined with buffer offset and range oversteps size of buffer
- DRAWSTATE_DOUBLE_DESTROY, // Destroying an object twice
- DRAWSTATE_OBJECT_INUSE, // Destroying or modifying an object in use by a command buffer
- DRAWSTATE_QUEUE_FORWARD_PROGRESS, // Queue cannot guarantee forward progress
- DRAWSTATE_INVALID_UNIFORM_BUFFER_OFFSET, // Dynamic Uniform Buffer Offsets violate device limit
- DRAWSTATE_INVALID_STORAGE_BUFFER_OFFSET, // Dynamic Storage Buffer Offsets violate device limit
+ DRAWSTATE_SAMPLER_DESCRIPTOR_ERROR, // A Descriptor of *_SAMPLER type is
+ // being updated with an invalid or bad
+ // Sampler
+ DRAWSTATE_INCONSISTENT_IMMUTABLE_SAMPLER_UPDATE, // Descriptors of
+ // *COMBINED_IMAGE_SAMPLER
+ // type are being updated
+ // where some, but not all,
+ // of the updates use
+ // immutable samplers
+ DRAWSTATE_IMAGEVIEW_DESCRIPTOR_ERROR, // A Descriptor of *_IMAGE or
+ // *_ATTACHMENT type is being updated
+ // with an invalid or bad ImageView
+ DRAWSTATE_BUFFERVIEW_DESCRIPTOR_ERROR, // A Descriptor of *_TEXEL_BUFFER
+ // type is being updated with an
+ // invalid or bad BufferView
+ DRAWSTATE_BUFFERINFO_DESCRIPTOR_ERROR, // A Descriptor of
+ // *_[UNIFORM|STORAGE]_BUFFER_[DYNAMIC]
+ // type is being updated with an
+ // invalid or bad BufferView
+ DRAWSTATE_DYNAMIC_OFFSET_OVERFLOW, // At draw time the dynamic offset
+ // combined with buffer offset and range
+ // oversteps size of buffer
+ DRAWSTATE_DOUBLE_DESTROY, // Destroying an object twice
+ DRAWSTATE_OBJECT_INUSE, // Destroying or modifying an object in use by a
+ // command buffer
+ DRAWSTATE_QUEUE_FORWARD_PROGRESS, // Queue cannot guarantee forward progress
+ DRAWSTATE_INVALID_UNIFORM_BUFFER_OFFSET, // Dynamic Uniform Buffer Offsets
+ // violate device limit
+ DRAWSTATE_INVALID_STORAGE_BUFFER_OFFSET, // Dynamic Storage Buffer Offsets
+ // violate device limit
} DRAW_STATE_ERROR;
typedef enum _SHADER_CHECKER_ERROR {
SHADER_CHECKER_NONE,
- SHADER_CHECKER_FS_MIXED_BROADCAST, /* FS writes broadcast output AND custom outputs */
- SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, /* Type mismatch between shader stages or shader and pipeline */
- SHADER_CHECKER_OUTPUT_NOT_CONSUMED, /* Entry appears in output interface, but missing in input */
- SHADER_CHECKER_INPUT_NOT_PRODUCED, /* Entry appears in input interface, but missing in output */
- SHADER_CHECKER_NON_SPIRV_SHADER, /* Shader image is not SPIR-V */
- SHADER_CHECKER_INCONSISTENT_SPIRV, /* General inconsistency within a SPIR-V module */
- SHADER_CHECKER_UNKNOWN_STAGE, /* Stage is not supported by analysis */
- SHADER_CHECKER_INCONSISTENT_VI, /* VI state contains conflicting binding or attrib descriptions */
- SHADER_CHECKER_MISSING_DESCRIPTOR, /* Shader attempts to use a descriptor binding not declared in the layout */
+ SHADER_CHECKER_FS_MIXED_BROADCAST, /* FS writes broadcast output AND custom
+ outputs */
+ SHADER_CHECKER_INTERFACE_TYPE_MISMATCH, /* Type mismatch between shader
+ stages or shader and pipeline */
+ SHADER_CHECKER_OUTPUT_NOT_CONSUMED, /* Entry appears in output interface,
+ but missing in input */
+ SHADER_CHECKER_INPUT_NOT_PRODUCED, /* Entry appears in input interface, but
+ missing in output */
+ SHADER_CHECKER_NON_SPIRV_SHADER, /* Shader image is not SPIR-V */
+ SHADER_CHECKER_INCONSISTENT_SPIRV, /* General inconsistency within a SPIR-V
+ module */
+ SHADER_CHECKER_UNKNOWN_STAGE, /* Stage is not supported by analysis */
+ SHADER_CHECKER_INCONSISTENT_VI, /* VI state contains conflicting binding or
+ attrib descriptions */
+ SHADER_CHECKER_MISSING_DESCRIPTOR, /* Shader attempts to use a descriptor
+ binding not declared in the layout */
} SHADER_CHECKER_ERROR;
-typedef enum _DRAW_TYPE
-{
- DRAW = 0,
- DRAW_INDEXED = 1,
- DRAW_INDIRECT = 2,
+typedef enum _DRAW_TYPE {
+ DRAW = 0,
+ DRAW_INDEXED = 1,
+ DRAW_INDIRECT = 2,
DRAW_INDEXED_INDIRECT = 3,
- DRAW_BEGIN_RANGE = DRAW,
- DRAW_END_RANGE = DRAW_INDEXED_INDIRECT,
- NUM_DRAW_TYPES = (DRAW_END_RANGE - DRAW_BEGIN_RANGE + 1),
+ DRAW_BEGIN_RANGE = DRAW,
+ DRAW_END_RANGE = DRAW_INDEXED_INDIRECT,
+ NUM_DRAW_TYPES = (DRAW_END_RANGE - DRAW_BEGIN_RANGE + 1),
} DRAW_TYPE;
typedef struct _SHADER_DS_MAPPING {
uint32_t slotCount;
- VkDescriptorSetLayoutCreateInfo* pShaderMappingSlot;
+ VkDescriptorSetLayoutCreateInfo *pShaderMappingSlot;
} SHADER_DS_MAPPING;
typedef struct _GENERIC_HEADER {
VkStructureType sType;
- const void* pNext;
+ const void *pNext;
} GENERIC_HEADER;
typedef struct _PIPELINE_NODE {
- VkPipeline pipeline;
- VkGraphicsPipelineCreateInfo graphicsPipelineCI;
- VkPipelineVertexInputStateCreateInfo vertexInputCI;
- VkPipelineInputAssemblyStateCreateInfo iaStateCI;
- VkPipelineTessellationStateCreateInfo tessStateCI;
- VkPipelineViewportStateCreateInfo vpStateCI;
- VkPipelineRasterizationStateCreateInfo rsStateCI;
- VkPipelineMultisampleStateCreateInfo msStateCI;
- VkPipelineColorBlendStateCreateInfo cbStateCI;
- VkPipelineDepthStencilStateCreateInfo dsStateCI;
- VkPipelineDynamicStateCreateInfo dynStateCI;
- VkPipelineShaderStageCreateInfo vsCI;
- VkPipelineShaderStageCreateInfo tcsCI;
- VkPipelineShaderStageCreateInfo tesCI;
- VkPipelineShaderStageCreateInfo gsCI;
- VkPipelineShaderStageCreateInfo fsCI;
+ VkPipeline pipeline;
+ VkGraphicsPipelineCreateInfo graphicsPipelineCI;
+ VkPipelineVertexInputStateCreateInfo vertexInputCI;
+ VkPipelineInputAssemblyStateCreateInfo iaStateCI;
+ VkPipelineTessellationStateCreateInfo tessStateCI;
+ VkPipelineViewportStateCreateInfo vpStateCI;
+ VkPipelineRasterizationStateCreateInfo rsStateCI;
+ VkPipelineMultisampleStateCreateInfo msStateCI;
+ VkPipelineColorBlendStateCreateInfo cbStateCI;
+ VkPipelineDepthStencilStateCreateInfo dsStateCI;
+ VkPipelineDynamicStateCreateInfo dynStateCI;
+ VkPipelineShaderStageCreateInfo vsCI;
+ VkPipelineShaderStageCreateInfo tcsCI;
+ VkPipelineShaderStageCreateInfo tesCI;
+ VkPipelineShaderStageCreateInfo gsCI;
+ VkPipelineShaderStageCreateInfo fsCI;
// Compute shader is include in VkComputePipelineCreateInfo
- VkComputePipelineCreateInfo computePipelineCI;
+ VkComputePipelineCreateInfo computePipelineCI;
// Flag of which shader stages are active for this pipeline
- uint32_t active_shaders;
+ uint32_t active_shaders;
// Capture which sets are actually used by the shaders of this pipeline
- std::set<unsigned> active_sets;
+ std::set<unsigned> active_sets;
// Vtx input info (if any)
- uint32_t vtxBindingCount; // number of bindings
- VkVertexInputBindingDescription* pVertexBindingDescriptions;
- uint32_t vtxAttributeCount; // number of attributes
- VkVertexInputAttributeDescription* pVertexAttributeDescriptions;
- uint32_t attachmentCount; // number of CB attachments
- VkPipelineColorBlendAttachmentState* pAttachments;
+ uint32_t vtxBindingCount; // number of bindings
+ VkVertexInputBindingDescription *pVertexBindingDescriptions;
+ uint32_t vtxAttributeCount; // number of attributes
+ VkVertexInputAttributeDescription *pVertexAttributeDescriptions;
+ uint32_t attachmentCount; // number of CB attachments
+ VkPipelineColorBlendAttachmentState *pAttachments;
// Default constructor
- _PIPELINE_NODE():pipeline{},
- graphicsPipelineCI{},
- vertexInputCI{},
- iaStateCI{},
- tessStateCI{},
- vpStateCI{},
- rsStateCI{},
- msStateCI{},
- cbStateCI{},
- dsStateCI{},
- dynStateCI{},
- vsCI{},
- tcsCI{},
- tesCI{},
- gsCI{},
- fsCI{},
- computePipelineCI{},
- active_shaders(0),
- vtxBindingCount(0),
- pVertexBindingDescriptions(0),
- vtxAttributeCount(0),
- pVertexAttributeDescriptions(0),
- attachmentCount(0),
- pAttachments(0)
- {};
+ _PIPELINE_NODE()
+ : pipeline{}, graphicsPipelineCI{}, vertexInputCI{}, iaStateCI{},
+ tessStateCI{}, vpStateCI{}, rsStateCI{}, msStateCI{}, cbStateCI{},
+ dsStateCI{}, dynStateCI{}, vsCI{}, tcsCI{}, tesCI{}, gsCI{}, fsCI{},
+ computePipelineCI{}, active_shaders(0), vtxBindingCount(0),
+ pVertexBindingDescriptions(0), vtxAttributeCount(0),
+ pVertexAttributeDescriptions(0), attachmentCount(0),
+ pAttachments(0){};
} PIPELINE_NODE;
class BASE_NODE {
@@ -210,15 +284,16 @@
};
typedef struct _SAMPLER_NODE {
- VkSampler sampler;
+ VkSampler sampler;
VkSamplerCreateInfo createInfo;
- _SAMPLER_NODE(const VkSampler* ps, const VkSamplerCreateInfo* pci) : sampler(*ps), createInfo(*pci) {};
+ _SAMPLER_NODE(const VkSampler *ps, const VkSamplerCreateInfo *pci)
+ : sampler(*ps), createInfo(*pci){};
} SAMPLER_NODE;
typedef struct _IMAGE_NODE {
VkImageLayout layout;
- VkFormat format;
+ VkFormat format;
} IMAGE_NODE;
typedef struct _IMAGE_CMD_BUF_NODE {
@@ -233,12 +308,12 @@
};
struct RENDER_PASS_NODE {
- VkRenderPassCreateInfo const* pCreateInfo;
+ VkRenderPassCreateInfo const *pCreateInfo;
std::vector<bool> hasSelfDependency;
vector<std::vector<VkFormat>> subpassColorFormats;
- RENDER_PASS_NODE(VkRenderPassCreateInfo const *pCreateInfo) : pCreateInfo(pCreateInfo)
- {
+ RENDER_PASS_NODE(VkRenderPassCreateInfo const *pCreateInfo)
+ : pCreateInfo(pCreateInfo) {
uint32_t i;
subpassColorFormats.reserve(pCreateInfo->subpassCount);
@@ -286,89 +361,105 @@
// Descriptor Data structures
// Layout Node has the core layout data
typedef struct _LAYOUT_NODE {
- VkDescriptorSetLayout layout;
+ VkDescriptorSetLayout layout;
VkDescriptorSetLayoutCreateInfo createInfo;
- uint32_t startIndex; // 1st index of this layout
- uint32_t endIndex; // last index of this layout
- uint32_t dynamicDescriptorCount; // Total count of dynamic descriptors used by this layout
- vector<VkDescriptorType> descriptorTypes; // Type per descriptor in this layout to verify correct updates
- vector<VkShaderStageFlags> stageFlags; // stageFlags per descriptor in this layout to verify correct updates
- unordered_set<uint32_t> bindings;
+ uint32_t startIndex; // 1st index of this layout
+ uint32_t endIndex; // last index of this layout
+ uint32_t dynamicDescriptorCount; // Total count of dynamic descriptors used
+ // by this layout
+ vector<VkDescriptorType> descriptorTypes; // Type per descriptor in this
+ // layout to verify correct
+ // updates
+ vector<VkShaderStageFlags> stageFlags; // stageFlags per descriptor in this
+ // layout to verify correct updates
+ unordered_set<uint32_t> bindings;
// Default constructor
- _LAYOUT_NODE():layout{},
- createInfo{},
- startIndex(0),
- endIndex(0),
- dynamicDescriptorCount(0)
- {};
+ _LAYOUT_NODE()
+ : layout{}, createInfo{}, startIndex(0), endIndex(0),
+ dynamicDescriptorCount(0){};
} LAYOUT_NODE;
// Store layouts and pushconstants for PipelineLayout
struct PIPELINE_LAYOUT_NODE {
- vector<VkDescriptorSetLayout> descriptorSetLayouts;
- vector<VkPushConstantRange> pushConstantRanges;
+ vector<VkDescriptorSetLayout> descriptorSetLayouts;
+ vector<VkPushConstantRange> pushConstantRanges;
};
class SET_NODE : public BASE_NODE {
public:
using BASE_NODE::in_use;
- VkDescriptorSet set;
- VkDescriptorPool pool;
+ VkDescriptorSet set;
+ VkDescriptorPool pool;
// Head of LL of all Update structs for this set
- GENERIC_HEADER* pUpdateStructs;
- // Total num of descriptors in this set (count of its layout plus all prior layouts)
- uint32_t descriptorCount;
- GENERIC_HEADER** ppDescriptors; // Array where each index points to update node for its slot
- LAYOUT_NODE* pLayout; // Layout for this set
- SET_NODE* pNext;
- unordered_set<VkCommandBuffer> boundCmdBuffers; // Cmd buffers that this set has been bound to
- SET_NODE() : pUpdateStructs(NULL), ppDescriptors(NULL), pLayout(NULL), pNext(NULL) {};
+ GENERIC_HEADER *pUpdateStructs;
+ // Total num of descriptors in this set (count of its layout plus all prior
+ // layouts)
+ uint32_t descriptorCount;
+ GENERIC_HEADER **ppDescriptors; // Array where each index points to update
+ // node for its slot
+ LAYOUT_NODE *pLayout; // Layout for this set
+ SET_NODE *pNext;
+ unordered_set<VkCommandBuffer>
+ boundCmdBuffers; // Cmd buffers that this set has been bound to
+ SET_NODE()
+ : pUpdateStructs(NULL), ppDescriptors(NULL), pLayout(NULL),
+ pNext(NULL){};
};
typedef struct _DESCRIPTOR_POOL_NODE {
- VkDescriptorPool pool;
- uint32_t maxSets;
+ VkDescriptorPool pool;
+ uint32_t maxSets;
VkDescriptorPoolCreateInfo createInfo;
- SET_NODE* pSets; // Head of LL of sets for this Pool
- vector<uint32_t> maxDescriptorTypeCount; // max # of descriptors of each type in this pool
- vector<uint32_t> availableDescriptorTypeCount; // available # of descriptors of each type in this pool
+ SET_NODE *pSets; // Head of LL of sets for this Pool
+ vector<uint32_t> maxDescriptorTypeCount; // max # of descriptors of each
+ // type in this pool
+ vector<uint32_t> availableDescriptorTypeCount; // available # of descriptors
+ // of each type in this pool
- _DESCRIPTOR_POOL_NODE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo* pCreateInfo) :
- pool(pool), createInfo(*pCreateInfo), maxSets(pCreateInfo->maxSets), pSets(NULL),
- maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE), availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE)
- {
- if (createInfo.poolSizeCount) { // Shadow type struct from ptr into local struct
- size_t poolSizeCountSize = createInfo.poolSizeCount * sizeof(VkDescriptorPoolSize);
+ _DESCRIPTOR_POOL_NODE(const VkDescriptorPool pool,
+ const VkDescriptorPoolCreateInfo *pCreateInfo)
+ : pool(pool), createInfo(*pCreateInfo), maxSets(pCreateInfo->maxSets),
+ pSets(NULL), maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE),
+ availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE) {
+ if (createInfo.poolSizeCount) { // Shadow type struct from ptr into
+ // local struct
+ size_t poolSizeCountSize =
+ createInfo.poolSizeCount * sizeof(VkDescriptorPoolSize);
createInfo.pPoolSizes = new VkDescriptorPoolSize[poolSizeCountSize];
- memcpy((void*)createInfo.pPoolSizes, pCreateInfo->pPoolSizes, poolSizeCountSize);
- // Now set max counts for each descriptor type based on count of that type times maxSets
- uint32_t i=0;
- for (i=0; i<createInfo.poolSizeCount; ++i) {
- uint32_t typeIndex = static_cast<uint32_t>(createInfo.pPoolSizes[i].type);
- uint32_t poolSizeCount = createInfo.pPoolSizes[i].descriptorCount;
+ memcpy((void *)createInfo.pPoolSizes, pCreateInfo->pPoolSizes,
+ poolSizeCountSize);
+ // Now set max counts for each descriptor type based on count of
+ // that type times maxSets
+ uint32_t i = 0;
+ for (i = 0; i < createInfo.poolSizeCount; ++i) {
+ uint32_t typeIndex =
+ static_cast<uint32_t>(createInfo.pPoolSizes[i].type);
+ uint32_t poolSizeCount =
+ createInfo.pPoolSizes[i].descriptorCount;
maxDescriptorTypeCount[typeIndex] += poolSizeCount;
}
- for (i=0; i<maxDescriptorTypeCount.size(); ++i) {
+ for (i = 0; i < maxDescriptorTypeCount.size(); ++i) {
maxDescriptorTypeCount[i] *= createInfo.maxSets;
// Initially the available counts are equal to the max counts
availableDescriptorTypeCount[i] = maxDescriptorTypeCount[i];
}
} else {
- createInfo.pPoolSizes = NULL; // Make sure this is NULL so we don't try to clean it up
+ createInfo.pPoolSizes =
+ NULL; // Make sure this is NULL so we don't try to clean it up
}
}
~_DESCRIPTOR_POOL_NODE() {
if (createInfo.pPoolSizes) {
delete[] createInfo.pPoolSizes;
}
- // TODO : pSets are currently freed in deletePools function which uses freeShadowUpdateTree function
+ // TODO : pSets are currently freed in deletePools function which uses
+ // freeShadowUpdateTree function
// need to migrate that struct to smart ptrs for auto-cleanup
}
} DESCRIPTOR_POOL_NODE;
// Cmd Buffer Tracking
-typedef enum _CMD_TYPE
-{
+typedef enum _CMD_TYPE {
CMD_BINDPIPELINE,
CMD_BINDPIPELINEDELTA,
CMD_SETVIEWPORTSTATE,
@@ -422,12 +513,11 @@
} CMD_TYPE;
// Data structure for holding sequence of cmds in cmd buffer
typedef struct _CMD_NODE {
- CMD_TYPE type;
- uint64_t cmdNumber;
+ CMD_TYPE type;
+ uint64_t cmdNumber;
} CMD_NODE;
-typedef enum _CB_STATE
-{
+typedef enum _CB_STATE {
CB_NEW, // Newly created CB w/o any cmds
CB_RECORDING, // BeginCB has been called on this CB
CB_RECORDED, // EndCB has been called on this CB
@@ -435,125 +525,133 @@
} CB_STATE;
// CB Status -- used to track status of various bindings on cmd buffer objects
typedef VkFlags CBStatusFlags;
-typedef enum _CBStatusFlagBits
-{
- CBSTATUS_NONE = 0x00000000, // No status is set
- CBSTATUS_VIEWPORT_SET = 0x00000001, // Viewport has been set
- CBSTATUS_LINE_WIDTH_SET = 0x00000002, // Line width has been set
- CBSTATUS_DEPTH_BIAS_SET = 0x00000004, // Depth bias has been set
- CBSTATUS_COLOR_BLEND_WRITE_ENABLE = 0x00000008, // PSO w/ CB Enable set has been set
- CBSTATUS_BLEND_SET = 0x00000010, // Blend state object has been set
- CBSTATUS_DEPTH_WRITE_ENABLE = 0x00000020, // PSO w/ Depth Enable set has been set
- CBSTATUS_STENCIL_TEST_ENABLE = 0x00000040, // PSO w/ Stencil Enable set has been set
- CBSTATUS_DEPTH_BOUNDS_SET = 0x00000080, // Depth bounds state object has been set
- CBSTATUS_STENCIL_READ_MASK_SET = 0x00000100, // Stencil read mask has been set
- CBSTATUS_STENCIL_WRITE_MASK_SET = 0x00000200, // Stencil write mask has been set
- CBSTATUS_STENCIL_REFERENCE_SET = 0x00000400, // Stencil reference has been set
- CBSTATUS_INDEX_BUFFER_BOUND = 0x00000800, // Index buffer has been set
- CBSTATUS_SCISSOR_SET = 0x00001000, // Scissor has been set
- CBSTATUS_ALL = 0x00001FFF, // All dynamic state set
+typedef enum _CBStatusFlagBits {
+ CBSTATUS_NONE = 0x00000000, // No status is set
+ CBSTATUS_VIEWPORT_SET = 0x00000001, // Viewport has been set
+ CBSTATUS_LINE_WIDTH_SET = 0x00000002, // Line width has been set
+ CBSTATUS_DEPTH_BIAS_SET = 0x00000004, // Depth bias has been set
+ CBSTATUS_COLOR_BLEND_WRITE_ENABLE =
+ 0x00000008, // PSO w/ CB Enable set has been set
+ CBSTATUS_BLEND_SET = 0x00000010, // Blend state object has been set
+ CBSTATUS_DEPTH_WRITE_ENABLE =
+ 0x00000020, // PSO w/ Depth Enable set has been set
+ CBSTATUS_STENCIL_TEST_ENABLE =
+ 0x00000040, // PSO w/ Stencil Enable set has been set
+ CBSTATUS_DEPTH_BOUNDS_SET =
+ 0x00000080, // Depth bounds state object has been set
+ CBSTATUS_STENCIL_READ_MASK_SET =
+ 0x00000100, // Stencil read mask has been set
+ CBSTATUS_STENCIL_WRITE_MASK_SET =
+ 0x00000200, // Stencil write mask has been set
+ CBSTATUS_STENCIL_REFERENCE_SET =
+ 0x00000400, // Stencil reference has been set
+ CBSTATUS_INDEX_BUFFER_BOUND = 0x00000800, // Index buffer has been set
+ CBSTATUS_SCISSOR_SET = 0x00001000, // Scissor has been set
+ CBSTATUS_ALL = 0x00001FFF, // All dynamic state set
} CBStatusFlagBits;
typedef struct stencil_data {
- uint32_t compareMask;
- uint32_t writeMask;
- uint32_t reference;
+ uint32_t compareMask;
+ uint32_t writeMask;
+ uint32_t reference;
} CBStencilData;
-typedef struct _DRAW_DATA {
- vector<VkBuffer> buffers;
-} DRAW_DATA;
+typedef struct _DRAW_DATA { vector<VkBuffer> buffers; } DRAW_DATA;
struct QueryObject {
VkQueryPool pool;
uint32_t index;
};
-bool operator==(const QueryObject& query1, const QueryObject& query2) {
+bool operator==(const QueryObject &query1, const QueryObject &query2) {
return (query1.pool == query2.pool && query1.index == query2.index);
}
namespace std {
-template <>
-struct hash<QueryObject> {
+template <> struct hash<QueryObject> {
size_t operator()(QueryObject query) const throw() {
- return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.index);
+ return hash<uint64_t>()((uint64_t)(query.pool)) ^
+ hash<uint32_t>()(query.index);
}
};
}
// Cmd Buffer Wrapper Struct
typedef struct _GLOBAL_CB_NODE {
- VkCommandBuffer commandBuffer;
- VkCommandBufferAllocateInfo createInfo;
- VkCommandBufferBeginInfo beginInfo;
+ VkCommandBuffer commandBuffer;
+ VkCommandBufferAllocateInfo createInfo;
+ VkCommandBufferBeginInfo beginInfo;
VkCommandBufferInheritanceInfo inheritanceInfo;
- VkFence fence; // fence tracking this cmd buffer
- VkDevice device; // device this DB belongs to
- uint64_t numCmds; // number of cmds in this CB
- uint64_t drawCount[NUM_DRAW_TYPES]; // Count of each type of draw in this CB
- CB_STATE state; // Track cmd buffer update state
- uint64_t submitCount; // Number of times CB has been submitted
- CBStatusFlags status; // Track status of various bindings on cmd buffer
- vector<CMD_NODE> cmds; // vector of commands bound to this command buffer
+ VkFence fence; // fence tracking this cmd buffer
+ VkDevice device; // device this DB belongs to
+ uint64_t numCmds; // number of cmds in this CB
+ uint64_t drawCount[NUM_DRAW_TYPES]; // Count of each type of draw in this CB
+ CB_STATE state; // Track cmd buffer update state
+ uint64_t submitCount; // Number of times CB has been submitted
+ CBStatusFlags status; // Track status of various bindings on cmd buffer
+ vector<CMD_NODE> cmds; // vector of commands bound to this command buffer
// Currently storing "lastBound" objects on per-CB basis
// long-term may want to create caches of "lastBound" states and could have
// each individual CMD_NODE referencing its own "lastBound" state
- VkPipeline lastBoundPipeline;
- uint32_t lastVtxBinding;
- vector<VkBuffer> boundVtxBuffers;
- vector<VkViewport> viewports;
- vector<VkRect2D> scissors;
- float lineWidth;
- float depthBiasConstantFactor;
- float depthBiasClamp;
- float depthBiasSlopeFactor;
- float blendConstants[4];
- float minDepthBounds;
- float maxDepthBounds;
- CBStencilData front;
- CBStencilData back;
- VkDescriptorSet lastBoundDescriptorSet;
- VkPipelineLayout lastBoundPipelineLayout;
- VkRenderPassBeginInfo activeRenderPassBeginInfo;
- VkRenderPass activeRenderPass;
- VkSubpassContents activeSubpassContents;
- uint32_t activeSubpass;
- VkFramebuffer framebuffer;
+ VkPipeline lastBoundPipeline;
+ uint32_t lastVtxBinding;
+ vector<VkBuffer> boundVtxBuffers;
+ vector<VkViewport> viewports;
+ vector<VkRect2D> scissors;
+ float lineWidth;
+ float depthBiasConstantFactor;
+ float depthBiasClamp;
+ float depthBiasSlopeFactor;
+ float blendConstants[4];
+ float minDepthBounds;
+ float maxDepthBounds;
+ CBStencilData front;
+ CBStencilData back;
+ VkDescriptorSet lastBoundDescriptorSet;
+ VkPipelineLayout lastBoundPipelineLayout;
+ VkRenderPassBeginInfo activeRenderPassBeginInfo;
+ VkRenderPass activeRenderPass;
+ VkSubpassContents activeSubpassContents;
+ uint32_t activeSubpass;
+ VkFramebuffer framebuffer;
// Capture unique std::set of descriptorSets that are bound to this CB.
- std::set<VkDescriptorSet> uniqueBoundSets;
- // Keep running track of which sets are bound to which set# at any given time
+ std::set<VkDescriptorSet> uniqueBoundSets;
+ // Keep running track of which sets are bound to which set# at any given
+ // time
// Track descriptor sets that are destroyed or updated while bound to CB
- std::set<VkDescriptorSet> destroyedSets;
- std::set<VkDescriptorSet> updatedSets;
- vector<VkDescriptorSet> boundDescriptorSets; // Index is set# that given set is bound to
- vector<VkEvent> waitedEvents;
- unordered_map<QueryObject, vector<VkEvent> > waitedEventsBeforeQueryReset;
- unordered_map<QueryObject, bool> queryToStateMap; // 0 is unavailable, 1 is available
+ std::set<VkDescriptorSet> destroyedSets;
+ std::set<VkDescriptorSet> updatedSets;
+ vector<VkDescriptorSet>
+ boundDescriptorSets; // Index is set# that given set is bound to
+ vector<VkEvent> waitedEvents;
+ unordered_map<QueryObject, vector<VkEvent>> waitedEventsBeforeQueryReset;
+ unordered_map<QueryObject, bool>
+ queryToStateMap; // 0 is unavailable, 1 is available
unordered_map<VkImage, IMAGE_CMD_BUF_NODE> imageLayoutMap;
- vector<DRAW_DATA> drawData;
- DRAW_DATA currentDrawData;
- // If cmd buffer is primary, track secondary command buffers pending execution
+ vector<DRAW_DATA> drawData;
+ DRAW_DATA currentDrawData;
+ // If cmd buffer is primary, track secondary command buffers pending
+ // execution
std::unordered_set<VkCommandBuffer> secondaryCommandBuffers;
- vector<uint32_t> dynamicOffsets; // one dynamic offset per dynamic descriptor bound to this CB
+ vector<uint32_t> dynamicOffsets; // one dynamic offset per dynamic
+ // descriptor bound to this CB
} GLOBAL_CB_NODE;
typedef struct _SWAPCHAIN_NODE {
- VkSwapchainCreateInfoKHR createInfo;
- uint32_t* pQueueFamilyIndices;
- std::vector<VkImage> images;
- _SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo) :
- createInfo(*pCreateInfo),
- pQueueFamilyIndices(NULL)
- {
+ VkSwapchainCreateInfoKHR createInfo;
+ uint32_t *pQueueFamilyIndices;
+ std::vector<VkImage> images;
+ _SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo)
+ : createInfo(*pCreateInfo), pQueueFamilyIndices(NULL) {
if (pCreateInfo->queueFamilyIndexCount) {
- pQueueFamilyIndices = new uint32_t[pCreateInfo->queueFamilyIndexCount];
- memcpy(pQueueFamilyIndices, pCreateInfo->pQueueFamilyIndices, pCreateInfo->queueFamilyIndexCount*sizeof(uint32_t));
+ pQueueFamilyIndices =
+ new uint32_t[pCreateInfo->queueFamilyIndexCount];
+ memcpy(pQueueFamilyIndices, pCreateInfo->pQueueFamilyIndices,
+ pCreateInfo->queueFamilyIndexCount * sizeof(uint32_t));
createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
}
}
- ~_SWAPCHAIN_NODE()
- {
+ ~_SWAPCHAIN_NODE() {
if (pQueueFamilyIndices)
delete pQueueFamilyIndices;
}
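
The draw_state.h hunks above are likewise formatting-only; the _DESCRIPTOR_POOL_NODE constructor still shadows pPoolSizes and derives per-type maximums by summing each pool size entry and scaling by maxSets, with the available counts starting equal to those maximums. A small self-contained sketch of that accounting, using hypothetical stand-in types (PoolSize, PoolLimits) rather than the real Vulkan structs, is:

// Minimal sketch of the pool-size accounting reformatted above, mirroring
// _DESCRIPTOR_POOL_NODE's constructor; PoolSize/PoolLimits are stand-ins.
#include <cstdint>
#include <vector>

struct PoolSize { uint32_t type; uint32_t descriptorCount; };

struct PoolLimits {
    std::vector<uint32_t> maxCount;       // per descriptor type
    std::vector<uint32_t> availableCount; // decremented as sets are allocated
};

static PoolLimits ComputePoolLimits(const std::vector<PoolSize> &sizes,
                                    uint32_t maxSets, uint32_t typeRange) {
    PoolLimits limits{std::vector<uint32_t>(typeRange, 0),
                      std::vector<uint32_t>(typeRange, 0)};
    // Sum requested counts per type, then scale by maxSets, as the
    // constructor above does with maxDescriptorTypeCount.
    for (const PoolSize &ps : sizes)
        limits.maxCount[ps.type] += ps.descriptorCount;
    for (uint32_t i = 0; i < typeRange; ++i) {
        limits.maxCount[i] *= maxSets;
        limits.availableCount[i] = limits.maxCount[i]; // initially all free
    }
    return limits;
}

In the layer these running counts presumably feed the DRAWSTATE_DESCRIPTOR_POOL_EMPTY check described in the error enum above when descriptor sets are allocated from the pool.
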
diff --git a/layers/image.cpp b/layers/image.cpp
index a40b2f2..56d1bb5 100644
--- a/layers/image.cpp
+++ b/layers/image.cpp
@@ -58,37 +58,33 @@
using namespace std;
struct layer_data {
- debug_report_data *report_data;
- vector<VkDebugReportCallbackEXT> logging_callback;
- VkLayerDispatchTable* device_dispatch_table;
+ debug_report_data *report_data;
+ vector<VkDebugReportCallbackEXT> logging_callback;
+ VkLayerDispatchTable *device_dispatch_table;
VkLayerInstanceDispatchTable *instance_dispatch_table;
- VkPhysicalDevice physicalDevice;
- VkPhysicalDeviceProperties physicalDeviceProperties;
+ VkPhysicalDevice physicalDevice;
+ VkPhysicalDeviceProperties physicalDeviceProperties;
unordered_map<VkImage, IMAGE_STATE> imageMap;
- layer_data() :
- report_data(nullptr),
- device_dispatch_table(nullptr),
- instance_dispatch_table(nullptr),
- physicalDevice(0),
- physicalDeviceProperties()
- {};
+ layer_data()
+ : report_data(nullptr), device_dispatch_table(nullptr),
+ instance_dispatch_table(nullptr), physicalDevice(0),
+ physicalDeviceProperties(){};
};
-static unordered_map<void*, layer_data*> layer_data_map;
+static unordered_map<void *, layer_data *> layer_data_map;
-static void InitImage(layer_data *data, const VkAllocationCallbacks *pAllocator)
-{
+static void InitImage(layer_data *data,
+ const VkAllocationCallbacks *pAllocator) {
VkDebugReportCallbackEXT callback;
uint32_t report_flags = getLayerOptionFlags("ImageReportFlags", 0);
uint32_t debug_action = 0;
- getLayerOptionEnum("ImageDebugAction", (uint32_t *) &debug_action);
- if(debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
- {
+ getLayerOptionEnum("ImageDebugAction", (uint32_t *)&debug_action);
+ if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
FILE *log_output = NULL;
- const char* option_str = getLayerOption("ImageLogFilename");
+ const char *option_str = getLayerOption("ImageLogFilename");
log_output = getLayerLogOutput(option_str, "Image");
VkDebugReportCallbackCreateInfoEXT dbgInfo;
memset(&dbgInfo, 0, sizeof(dbgInfo));
@@ -96,7 +92,8 @@
dbgInfo.pfnCallback = log_callback;
dbgInfo.pUserData = log_output;
dbgInfo.flags = report_flags;
- layer_create_msg_callback(data->report_data, &dbgInfo, pAllocator, &callback);
+ layer_create_msg_callback(data->report_data, &dbgInfo, pAllocator,
+ &callback);
data->logging_callback.push_back(callback);
}
@@ -106,56 +103,63 @@
dbgInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
dbgInfo.pfnCallback = win32_debug_output_msg;
dbgInfo.flags = report_flags;
- layer_create_msg_callback(data->report_data, &dbgInfo, pAllocator, &callback);
+ layer_create_msg_callback(data->report_data, &dbgInfo, pAllocator,
+ &callback);
data->logging_callback.push_back(callback);
}
}
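
For context, InitImage above turns the layer settings (ImageReportFlags, ImageDebugAction, ImageLogFilename) into a VK_EXT_debug_report callback. A minimal sketch of registering such a callback directly through the extension entry points; DebugToFile and RegisterDebugCallback are illustrative names, and only VK_DEBUG_REPORT_ERROR_BIT_EXT is requested here instead of flags parsed from layer options:

    #include <cstdio>
    #include <vulkan/vulkan.h>

    // Illustrative callback: print every report to the FILE* passed as pUserData.
    static VKAPI_ATTR VkBool32 VKAPI_CALL
    DebugToFile(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType,
                uint64_t object, size_t location, int32_t msgCode,
                const char *pLayerPrefix, const char *pMsg, void *pUserData) {
        fprintf((FILE *)pUserData, "[%s] %d: %s\n", pLayerPrefix, msgCode, pMsg);
        return VK_FALSE; // do not abort the call that triggered the report
    }

    static VkResult RegisterDebugCallback(VkInstance instance,
                                          VkDebugReportCallbackEXT *pCallback) {
        VkDebugReportCallbackCreateInfoEXT info = {};
        info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        info.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT;
        info.pfnCallback = DebugToFile;
        info.pUserData = stderr;
        auto create = (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(
            instance, "vkCreateDebugReportCallbackEXT");
        return create ? create(instance, &info, nullptr, pCallback)
                      : VK_ERROR_EXTENSION_NOT_PRESENT;
    }
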
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pMsgCallback)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- VkResult res = my_data->instance_dispatch_table->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
+ VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDebugReportCallbackEXT *pMsgCallback) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ VkResult res =
+ my_data->instance_dispatch_table->CreateDebugReportCallbackEXT(
+ instance, pCreateInfo, pAllocator, pMsgCallback);
if (res == VK_SUCCESS) {
- res = layer_create_msg_callback(my_data->report_data, pCreateInfo, pAllocator, pMsgCallback);
+ res = layer_create_msg_callback(my_data->report_data, pCreateInfo,
+ pAllocator, pMsgCallback);
}
return res;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT msgCallback,
- const VkAllocationCallbacks* pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDebugReportCallbackEXT(VkInstance instance,
+ VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(
+ instance, msgCallback, pAllocator);
layer_destroy_msg_callback(my_data->report_data, msgCallback, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objType,
- uint64_t object,
- size_t location,
- int32_t msgCode,
- const char* pLayerPrefix,
- const char* pMsg)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+ VkDebugReportObjectTypeEXT objType, uint64_t object,
+ size_t location, int32_t msgCode,
+ const char *pLayerPrefix, const char *pMsg) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table->DebugReportMessageEXT(
+ instance, flags, objType, object, location, msgCode, pLayerPrefix,
+ pMsg);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkInstance *pInstance) {
+ VkLayerInstanceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkCreateInstance fpCreateInstance =
+ (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if (fpCreateInstance == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -167,23 +171,25 @@
if (result != VK_SUCCESS)
return result;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
my_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
- layer_init_instance_dispatch_table(*pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
+ layer_init_instance_dispatch_table(
+ *pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
my_data->report_data = debug_report_create_instance(
- my_data->instance_dispatch_table,
- *pInstance,
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ my_data->instance_dispatch_table, *pInstance,
+ pCreateInfo->enabledExtensionCount,
+ pCreateInfo->ppEnabledExtensionNames);
InitImage(my_data, pAllocator);
return result;
}
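
vkCreateInstance above is the standard layer bootstrap: pull the loader's link out of the create-info chain, resolve the downstream vkCreateInstance through pfnNextGetInstanceProcAddr, advance the link, call down, and only then build this layer's dispatch table and report_data. A condensed sketch of that flow; FindLayerLink and InterceptCreateInstance are illustrative stand-ins for the repo's get_chain_info() and the exported entry point, assuming the loader publishes the link as a VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO entry as described in vk_layer.h:

    #include <cassert>
    #include <vulkan/vk_layer.h>
    #include <vulkan/vulkan.h>

    // Stand-in for get_chain_info(): walk pNext looking for the loader's layer link.
    static VkLayerInstanceCreateInfo *
    FindLayerLink(const VkInstanceCreateInfo *pCreateInfo) {
        auto *item = (VkLayerInstanceCreateInfo *)pCreateInfo->pNext;
        while (item &&
               !(item->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO &&
                 item->function == VK_LAYER_LINK_INFO)) {
            item = (VkLayerInstanceCreateInfo *)item->pNext;
        }
        return item;
    }

    static VkResult InterceptCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
                                            const VkAllocationCallbacks *pAllocator,
                                            VkInstance *pInstance) {
        VkLayerInstanceCreateInfo *chain_info = FindLayerLink(pCreateInfo);
        assert(chain_info && chain_info->u.pLayerInfo);

        // Resolve the next element's vkCreateInstance before touching the chain.
        PFN_vkGetInstanceProcAddr nextGIPA =
            chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
        auto nextCreateInstance =
            (PFN_vkCreateInstance)nextGIPA(NULL, "vkCreateInstance");
        if (!nextCreateInstance)
            return VK_ERROR_INITIALIZATION_FAILED;

        // Advance the link so the next layer sees its own position in the chain.
        chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;

        // Call down the chain; per-instance state would be initialized on success.
        return nextCreateInstance(pCreateInfo, pAllocator, pInstance);
    }
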
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyInstance(VkInstance instance,
+ const VkAllocationCallbacks *pAllocator) {
// Grab the key before the instance is destroyed.
dispatch_key key = get_dispatch_key(instance);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
@@ -200,17 +206,22 @@
layer_debug_report_destroy_instance(my_data->report_data);
delete my_data->instance_dispatch_table;
layer_data_map.erase(key);
-
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice)
-{
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDevice(VkPhysicalDevice physicalDevice,
+ const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if (fpCreateDevice == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -218,28 +229,34 @@
// Advance the link info for the next element on the chain
chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
- VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
+ VkResult result =
+ fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
if (result != VK_SUCCESS) {
return result;
}
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
+ layer_data *my_instance_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
// Setup device dispatch table
my_device_data->device_dispatch_table = new VkLayerDispatchTable;
- layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
+ layer_init_device_dispatch_table(
+ *pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
- my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
+ my_device_data->report_data = layer_debug_report_create_device(
+ my_instance_data->report_data, *pDevice);
my_device_data->physicalDevice = physicalDevice;
- my_instance_data->instance_dispatch_table->GetPhysicalDeviceProperties(physicalDevice, &(my_device_data->physicalDeviceProperties));
+ my_instance_data->instance_dispatch_table->GetPhysicalDeviceProperties(
+ physicalDevice, &(my_device_data->physicalDeviceProperties));
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
dispatch_key key = get_dispatch_key(device);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
my_data->device_dispatch_table->DestroyDevice(device, pAllocator);
@@ -248,181 +265,196 @@
}
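
Nearly every entry point above begins with get_my_data_ptr(get_dispatch_key(handle), layer_data_map). The key is the dispatch-table pointer the loader installs at the start of every dispatchable handle (VkInstance, VkDevice, VkCommandBuffer, ...), so one map can hold per-instance and per-device layer_data side by side. A small sketch of that pattern with hypothetical names (KeyOf/DataFor rather than the repo's helpers), assuming the desktop loader ABI where a dispatchable handle's first word is its dispatch table:

    #include <unordered_map>

    struct LayerData; // per-instance / per-device bookkeeping, as in the structs above

    using DispatchKey = void *;

    // A dispatchable Vulkan handle points at an object whose first field is the
    // loader-installed dispatch table; that pointer identifies the owning
    // instance or device.
    static inline DispatchKey KeyOf(void *dispatchableHandle) {
        return *(DispatchKey *)dispatchableHandle;
    }

    static std::unordered_map<DispatchKey, LayerData *> g_layerDataMap;

    static LayerData *DataFor(void *dispatchableHandle) {
        auto it = g_layerDataMap.find(KeyOf(dispatchableHandle));
        return (it == g_layerDataMap.end()) ? nullptr : it->second;
    }

A device and the command buffers allocated from it share one dispatch table, which is how the vkCmd* hooks above recover the device's layer_data from a VkCommandBuffer.
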
static const VkExtensionProperties instance_extensions[] = {
- {
- VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
- VK_EXT_DEBUG_REPORT_SPEC_VERSION
- }
-};
+ {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
- const char *pLayerName,
- uint32_t *pCount,
- VkExtensionProperties* pProperties)
-{
- return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
+ return util_GetExtensionProperties(1, instance_extensions, pCount,
+ pProperties);
}
-static const VkLayerProperties pc_global_layers[] = {
- {
- "VK_LAYER_LUNARG_image",
- VK_API_VERSION,
- VK_MAKE_VERSION(0, 1, 0),
- "Validation layer: image",
- }
-};
+static const VkLayerProperties pc_global_layers[] = {{
+ "VK_LAYER_LUNARG_image", VK_API_VERSION, VK_MAKE_VERSION(0, 1, 0),
+ "Validation layer: image",
+}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
- uint32_t *pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+ VkLayerProperties *pProperties) {
return util_GetLayerProperties(ARRAY_SIZE(pc_global_layers),
- pc_global_layers,
- pCount, pProperties);
+ pc_global_layers, pCount, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pCount,
- VkExtensionProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+ const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
// Image does not have any physical device extensions
if (pLayerName == NULL) {
dispatch_key key = get_dispatch_key(physicalDevice);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
- return pTable->EnumerateDeviceExtensionProperties(
- physicalDevice,
- NULL,
- pCount,
- pProperties);
+ return pTable->EnumerateDeviceExtensionProperties(physicalDevice, NULL,
+ pCount, pProperties);
} else {
return util_GetExtensionProperties(0, NULL, pCount, pProperties);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
+ uint32_t *pCount,
+ VkLayerProperties *pProperties) {
// ParamChecker's physical device layers are the same as global
- return util_GetLayerProperties(ARRAY_SIZE(pc_global_layers), pc_global_layers,
- pCount, pProperties);
+ return util_GetLayerProperties(ARRAY_SIZE(pc_global_layers),
+ pc_global_layers, pCount, pProperties);
}
// Start of the Image layer proper
// Returns TRUE if a format is a depth-compatible format
-bool is_depth_format(VkFormat format)
-{
+bool is_depth_format(VkFormat format) {
bool result = VK_FALSE;
switch (format) {
- case VK_FORMAT_D16_UNORM:
- case VK_FORMAT_X8_D24_UNORM_PACK32:
- case VK_FORMAT_D32_SFLOAT:
- case VK_FORMAT_S8_UINT:
- case VK_FORMAT_D16_UNORM_S8_UINT:
- case VK_FORMAT_D24_UNORM_S8_UINT:
- case VK_FORMAT_D32_SFLOAT_S8_UINT:
- result = VK_TRUE;
- break;
- default:
- break;
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D32_SFLOAT:
+ case VK_FORMAT_S8_UINT:
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ result = VK_TRUE;
+ break;
+ default:
+ break;
}
return result;
}
-static inline uint32_t validate_VkImageLayoutKHR(VkImageLayout input_value)
-{
+static inline uint32_t validate_VkImageLayoutKHR(VkImageLayout input_value) {
return ((validate_VkImageLayout(input_value) == 1) ||
(input_value == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR));
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage)
-{
- VkBool32 skipCall = VK_FALSE;
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
+ VkBool32 skipCall = VK_FALSE;
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
VkImageFormatProperties ImageFormatProperties = {0};
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkPhysicalDevice physicalDevice = device_data->physicalDevice;
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkPhysicalDevice physicalDevice = device_data->physicalDevice;
+ layer_data *phy_dev_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- if (pCreateInfo->format != VK_FORMAT_UNDEFINED)
- {
+ if (pCreateInfo->format != VK_FORMAT_UNDEFINED) {
VkFormatProperties properties;
- phy_dev_data->instance_dispatch_table->GetPhysicalDeviceFormatProperties(
+ phy_dev_data->instance_dispatch_table
+ ->GetPhysicalDeviceFormatProperties(
device_data->physicalDevice, pCreateInfo->format, &properties);
- if ((properties.linearTilingFeatures) == 0 && (properties.optimalTilingFeatures == 0))
- {
- char const str[] = "vkCreateImage parameter, VkFormat pCreateInfo->format, contains unsupported format";
- // TODO: Verify against Valid Use section of spec. Generally if something yield an undefined result, it's invalid
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_FORMAT_UNSUPPORTED, "IMAGE", str);
+ if ((properties.linearTilingFeatures) == 0 &&
+ (properties.optimalTilingFeatures == 0)) {
+ char const str[] = "vkCreateImage parameter, VkFormat "
+ "pCreateInfo->format, contains unsupported "
+ "format";
+ // TODO: Verify against Valid Use section of spec. Generally if
+ // something yields an undefined result, it's invalid
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_FORMAT_UNSUPPORTED, "IMAGE", str);
}
}
- // Internal call to get format info. Still goes through layers, could potentially go directly to ICD.
- phy_dev_data->instance_dispatch_table->GetPhysicalDeviceImageFormatProperties(
- physicalDevice, pCreateInfo->format, pCreateInfo->imageType, pCreateInfo->tiling,
- pCreateInfo->usage, pCreateInfo->flags, &ImageFormatProperties);
+ // Internal call to get format info. Still goes through layers, could
+ // potentially go directly to ICD.
+ phy_dev_data->instance_dispatch_table
+ ->GetPhysicalDeviceImageFormatProperties(
+ physicalDevice, pCreateInfo->format, pCreateInfo->imageType,
+ pCreateInfo->tiling, pCreateInfo->usage, pCreateInfo->flags,
+ &ImageFormatProperties);
- VkDeviceSize imageGranularity = device_data->physicalDeviceProperties.limits.bufferImageGranularity;
+ VkDeviceSize imageGranularity =
+ device_data->physicalDeviceProperties.limits.bufferImageGranularity;
imageGranularity = imageGranularity == 1 ? 0 : imageGranularity;
- if ((pCreateInfo->extent.depth > ImageFormatProperties.maxExtent.depth) ||
- (pCreateInfo->extent.width > ImageFormatProperties.maxExtent.width) ||
+ if ((pCreateInfo->extent.depth > ImageFormatProperties.maxExtent.depth) ||
+ (pCreateInfo->extent.width > ImageFormatProperties.maxExtent.width) ||
(pCreateInfo->extent.height > ImageFormatProperties.maxExtent.height)) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage, __LINE__,
- IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
- "CreateImage extents exceed allowable limits for format: "
- "Width = %d Height = %d Depth = %d: Limits for Width = %d Height = %d Depth = %d for format %s.",
- pCreateInfo->extent.width, pCreateInfo->extent.height, pCreateInfo->extent.depth,
- ImageFormatProperties.maxExtent.width, ImageFormatProperties.maxExtent.height, ImageFormatProperties.maxExtent.depth,
- string_VkFormat(pCreateInfo->format));
-
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage, __LINE__,
+ IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
+ "CreateImage extents exceed allowable limits for format: "
+ "Width = %d Height = %d Depth = %d: Limits for Width = %d Height "
+ "= %d Depth = %d for format %s.",
+ pCreateInfo->extent.width, pCreateInfo->extent.height,
+ pCreateInfo->extent.depth, ImageFormatProperties.maxExtent.width,
+ ImageFormatProperties.maxExtent.height,
+ ImageFormatProperties.maxExtent.depth,
+ string_VkFormat(pCreateInfo->format));
}
- uint64_t totalSize = ((uint64_t)pCreateInfo->extent.width *
- (uint64_t)pCreateInfo->extent.height *
- (uint64_t)pCreateInfo->extent.depth *
- (uint64_t)pCreateInfo->arrayLayers *
- (uint64_t)pCreateInfo->samples *
- (uint64_t)vk_format_get_size(pCreateInfo->format) +
- (uint64_t)imageGranularity ) & ~(uint64_t)imageGranularity;
+ uint64_t totalSize =
+ ((uint64_t)pCreateInfo->extent.width *
+ (uint64_t)pCreateInfo->extent.height *
+ (uint64_t)pCreateInfo->extent.depth *
+ (uint64_t)pCreateInfo->arrayLayers *
+ (uint64_t)pCreateInfo->samples *
+ (uint64_t)vk_format_get_size(pCreateInfo->format) +
+ (uint64_t)imageGranularity) &
+ ~(uint64_t)imageGranularity;
if (totalSize > ImageFormatProperties.maxResourceSize) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage, __LINE__,
- IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
- "CreateImage resource size exceeds allowable maximum "
- "Image resource size = %#" PRIxLEAST64 ", maximum resource size = %#" PRIxLEAST64 " ",
- totalSize, ImageFormatProperties.maxResourceSize);
+ skipCall |=
+ log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage,
+ __LINE__, IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
+ "CreateImage resource size exceeds allowable maximum "
+ "Image resource size = %#" PRIxLEAST64
+ ", maximum resource size = %#" PRIxLEAST64 " ",
+ totalSize, ImageFormatProperties.maxResourceSize);
}
if (pCreateInfo->mipLevels > ImageFormatProperties.maxMipLevels) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage, __LINE__,
- IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
- "CreateImage mipLevels=%d exceeds allowable maximum supported by format of %d",
- pCreateInfo->mipLevels, ImageFormatProperties.maxMipLevels);
+ skipCall |=
+ log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage,
+ __LINE__, IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
+ "CreateImage mipLevels=%d exceeds allowable maximum "
+ "supported by format of %d",
+ pCreateInfo->mipLevels, ImageFormatProperties.maxMipLevels);
}
if (pCreateInfo->arrayLayers > ImageFormatProperties.maxArrayLayers) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage, __LINE__,
- IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
- "CreateImage arrayLayers=%d exceeds allowable maximum supported by format of %d",
- pCreateInfo->arrayLayers, ImageFormatProperties.maxArrayLayers);
+ skipCall |= log_msg(
+ phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage, __LINE__,
+ IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
+ "CreateImage arrayLayers=%d exceeds allowable maximum supported by "
+ "format of %d",
+ pCreateInfo->arrayLayers, ImageFormatProperties.maxArrayLayers);
}
if ((pCreateInfo->samples & ImageFormatProperties.sampleCounts) == 0) {
- skipCall |= log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage, __LINE__,
- IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
- "CreateImage samples %s is not supported by format 0x%.8X",
- string_VkSampleCountFlagBits(pCreateInfo->samples), ImageFormatProperties.sampleCounts);
+ skipCall |=
+ log_msg(phy_dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)pImage,
+ __LINE__, IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, "Image",
+ "CreateImage samples %s is not supported by format 0x%.8X",
+ string_VkSampleCountFlagBits(pCreateInfo->samples),
+ ImageFormatProperties.sampleCounts);
}
if (VK_FALSE == skipCall) {
- result = device_data->device_dispatch_table->CreateImage(device, pCreateInfo, pAllocator, pImage);
+ result = device_data->device_dispatch_table->CreateImage(
+ device, pCreateInfo, pAllocator, pImage);
}
if (result == VK_SUCCESS) {
device_data->imageMap[*pImage] = IMAGE_STATE(pCreateInfo);
@@ -430,213 +462,350 @@
return result;
}
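
The vkCreateImage hook above compares the requested extent, mip count, layer count, and sample count against what vkGetPhysicalDeviceImageFormatProperties reports for the format/tiling/usage combination, and rounds its resource-size estimate to bufferImageGranularity before checking maxResourceSize. A minimal application-side sketch of the same limit query; ImageWithinFormatLimits is an illustrative name, and the size/granularity check is left out:

    #include <vulkan/vulkan.h>

    // Ask the implementation for the per-format limits and compare the requested
    // image against them before calling vkCreateImage.
    static bool ImageWithinFormatLimits(VkPhysicalDevice gpu,
                                        const VkImageCreateInfo *ci) {
        VkImageFormatProperties props = {};
        if (vkGetPhysicalDeviceImageFormatProperties(
                gpu, ci->format, ci->imageType, ci->tiling, ci->usage, ci->flags,
                &props) != VK_SUCCESS)
            return false; // format/usage combination not supported at all

        return ci->extent.width <= props.maxExtent.width &&
               ci->extent.height <= props.maxExtent.height &&
               ci->extent.depth <= props.maxExtent.depth &&
               ci->mipLevels <= props.maxMipLevels &&
               ci->arrayLayers <= props.maxArrayLayers &&
               (ci->samples & props.sampleCounts) != 0;
    }
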
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator)
-{
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyImage(VkDevice device, VkImage image,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
device_data->imageMap.erase(image);
device_data->device_dispatch_table->DestroyImage(device, image, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateRenderPass(VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkRenderPass *pRenderPass) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skipCall = VK_FALSE;
- for(uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i)
- {
- if(pCreateInfo->pAttachments[i].format != VK_FORMAT_UNDEFINED)
- {
+ for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+ if (pCreateInfo->pAttachments[i].format != VK_FORMAT_UNDEFINED) {
VkFormatProperties properties;
- get_my_data_ptr(get_dispatch_key(my_data->physicalDevice), layer_data_map)->instance_dispatch_table->GetPhysicalDeviceFormatProperties(
- my_data->physicalDevice, pCreateInfo->pAttachments[i].format, &properties);
+ get_my_data_ptr(get_dispatch_key(my_data->physicalDevice),
+ layer_data_map)
+ ->instance_dispatch_table->GetPhysicalDeviceFormatProperties(
+ my_data->physicalDevice,
+ pCreateInfo->pAttachments[i].format, &properties);
- if((properties.linearTilingFeatures) == 0 && (properties.optimalTilingFeatures == 0))
- {
+ if ((properties.linearTilingFeatures) == 0 &&
+ (properties.optimalTilingFeatures == 0)) {
std::stringstream ss;
- ss << "vkCreateRenderPass parameter, VkFormat in pCreateInfo->pAttachments[" << i << "], contains unsupported format";
- // TODO: Verify against Valid Use section of spec. Generally if something yield an undefined result, it's invalid
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_FORMAT_UNSUPPORTED, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateRenderPass parameter, VkFormat in "
+ "pCreateInfo->pAttachments[" << i
+ << "], contains unsupported format";
+ // TODO: Verify against Valid Use section of spec. Generally if
+ // something yields an undefined result, it's invalid
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_FORMAT_UNSUPPORTED, "IMAGE", "%s", ss.str().c_str());
}
}
}
- for(uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i)
- {
- if(!validate_VkImageLayoutKHR(pCreateInfo->pAttachments[i].initialLayout) ||
- !validate_VkImageLayoutKHR(pCreateInfo->pAttachments[i].finalLayout))
- {
+ for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+ if (!validate_VkImageLayoutKHR(
+ pCreateInfo->pAttachments[i].initialLayout) ||
+ !validate_VkImageLayoutKHR(
+ pCreateInfo->pAttachments[i].finalLayout)) {
std::stringstream ss;
- ss << "vkCreateRenderPass parameter, VkImageLayout in pCreateInfo->pAttachments[" << i << "], is unrecognized";
- // TODO: Verify against Valid Use section of spec. Generally if something yield an undefined result, it's invalid
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_RENDERPASS_INVALID_ATTACHMENT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateRenderPass parameter, VkImageLayout in "
+ "pCreateInfo->pAttachments[" << i << "], is unrecognized";
+ // TODO: Verify against Valid Use section of spec. Generally if
+ // something yields an undefined result, it's invalid
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_RENDERPASS_INVALID_ATTACHMENT, "IMAGE", "%s",
+ ss.str().c_str());
}
}
- for(uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i)
- {
- if(!validate_VkAttachmentLoadOp(pCreateInfo->pAttachments[i].loadOp))
- {
+ for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+ if (!validate_VkAttachmentLoadOp(pCreateInfo->pAttachments[i].loadOp)) {
std::stringstream ss;
- ss << "vkCreateRenderPass parameter, VkAttachmentLoadOp in pCreateInfo->pAttachments[" << i << "], is unrecognized";
- // TODO: Verify against Valid Use section of spec. Generally if something yield an undefined result, it's invalid
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_RENDERPASS_INVALID_ATTACHMENT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateRenderPass parameter, VkAttachmentLoadOp in "
+ "pCreateInfo->pAttachments[" << i << "], is unrecognized";
+ // TODO: Verify against Valid Use section of spec. Generally if
+ // something yields an undefined result, it's invalid
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_RENDERPASS_INVALID_ATTACHMENT, "IMAGE", "%s",
+ ss.str().c_str());
}
}
- for(uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i)
- {
- if(!validate_VkAttachmentStoreOp(pCreateInfo->pAttachments[i].storeOp))
- {
+ for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+ if (!validate_VkAttachmentStoreOp(
+ pCreateInfo->pAttachments[i].storeOp)) {
std::stringstream ss;
- ss << "vkCreateRenderPass parameter, VkAttachmentStoreOp in pCreateInfo->pAttachments[" << i << "], is unrecognized";
- // TODO: Verify against Valid Use section of spec. Generally if something yield an undefined result, it's invalid
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_RENDERPASS_INVALID_ATTACHMENT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateRenderPass parameter, VkAttachmentStoreOp in "
+ "pCreateInfo->pAttachments[" << i << "], is unrecognized";
+ // TODO: Verify against Valid Use section of spec. Generally if
+ // something yields an undefined result, it's invalid
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_RENDERPASS_INVALID_ATTACHMENT, "IMAGE", "%s",
+ ss.str().c_str());
}
}
// Any depth buffers specified as attachments?
bool depthFormatPresent = VK_FALSE;
- for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i)
- {
- depthFormatPresent |= is_depth_format(pCreateInfo->pAttachments[i].format);
+ for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
+ depthFormatPresent |=
+ is_depth_format(pCreateInfo->pAttachments[i].format);
}
if (depthFormatPresent == VK_FALSE) {
- // No depth attachment is present, validate that subpasses set depthStencilAttachment to VK_ATTACHMENT_UNUSED;
+ // No depth attachment is present; validate that subpasses set
+ // depthStencilAttachment to VK_ATTACHMENT_UNUSED.
for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++) {
if (pCreateInfo->pSubpasses[i].pDepthStencilAttachment &&
- pCreateInfo->pSubpasses[i].pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+ pCreateInfo->pSubpasses[i]
+ .pDepthStencilAttachment->attachment !=
+ VK_ATTACHMENT_UNUSED) {
std::stringstream ss;
- ss << "vkCreateRenderPass has no depth/stencil attachment, yet subpass[" << i << "] has VkSubpassDescription::depthStencilAttachment value that is not VK_ATTACHMENT_UNUSED";
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_RENDERPASS_INVALID_DS_ATTACHMENT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateRenderPass has no depth/stencil attachment, yet "
+ "subpass[" << i
+ << "] has VkSubpassDescription::depthStencilAttachment "
+ "value that is not VK_ATTACHMENT_UNUSED";
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_RENDERPASS_INVALID_DS_ATTACHMENT, "IMAGE",
+ "%s", ss.str().c_str());
}
}
}
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = my_data->device_dispatch_table->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+ VkResult result = my_data->device_dispatch_table->CreateRenderPass(
+ device, pCreateInfo, pAllocator, pRenderPass);
return result;
}
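
vkCreateRenderPass above loops over pAttachments several times: once to flag formats that report no tiling features at all, then to reject unrecognized layouts, loadOps, and storeOps, and finally to require depthStencilAttachment == VK_ATTACHMENT_UNUSED in every subpass when no attachment has a depth-capable format. The core format test reduces to a single query; a sketch with an illustrative helper name:

    #include <vulkan/vulkan.h>

    // A format with neither linear nor optimal tiling features is unusable as a
    // render pass attachment on this physical device.
    static bool FormatIsSupported(VkPhysicalDevice gpu, VkFormat format) {
        VkFormatProperties props = {};
        vkGetPhysicalDeviceFormatProperties(gpu, format, &props);
        return props.linearTilingFeatures != 0 ||
               props.optimalTilingFeatures != 0;
    }
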
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkImageView *pView) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
auto imageEntry = device_data->imageMap.find(pCreateInfo->image);
if (imageEntry != device_data->imageMap.end()) {
- if (pCreateInfo->subresourceRange.baseMipLevel >= imageEntry->second.mipLevels) {
+ if (pCreateInfo->subresourceRange.baseMipLevel >=
+ imageEntry->second.mipLevels) {
std::stringstream ss;
- ss << "vkCreateImageView called with baseMipLevel " << pCreateInfo->subresourceRange.baseMipLevel
- << " for image " << pCreateInfo->image << " that only has " << imageEntry->second.mipLevels << " mip levels.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView called with baseMipLevel "
+ << pCreateInfo->subresourceRange.baseMipLevel << " for image "
+ << pCreateInfo->image << " that only has "
+ << imageEntry->second.mipLevels << " mip levels.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
}
- if (pCreateInfo->subresourceRange.baseArrayLayer >= imageEntry->second.arraySize) {
+ if (pCreateInfo->subresourceRange.baseArrayLayer >=
+ imageEntry->second.arraySize) {
std::stringstream ss;
- ss << "vkCreateImageView called with baseArrayLayer " << pCreateInfo->subresourceRange.baseArrayLayer << " for image "
- << pCreateInfo->image << " that only has " << imageEntry->second.arraySize << " mip levels.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView called with baseArrayLayer "
+ << pCreateInfo->subresourceRange.baseArrayLayer << " for image "
+ << pCreateInfo->image << " that only has "
+ << imageEntry->second.arraySize << " array layers.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
}
if (!pCreateInfo->subresourceRange.levelCount) {
std::stringstream ss;
- ss << "vkCreateImageView called with 0 in pCreateInfo->subresourceRange.levelCount.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView called with 0 in "
+ "pCreateInfo->subresourceRange.levelCount.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
}
if (!pCreateInfo->subresourceRange.layerCount) {
std::stringstream ss;
- ss << "vkCreateImageView called with 0 in pCreateInfo->subresourceRange.layerCount.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView called with 0 in "
+ "pCreateInfo->subresourceRange.layerCount.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
}
- VkImageCreateFlags imageFlags = imageEntry->second.flags;
- VkFormat imageFormat = imageEntry->second.format;
- VkFormat ivciFormat = pCreateInfo->format;
- VkImageAspectFlags aspectMask = pCreateInfo->subresourceRange.aspectMask;
+ VkImageCreateFlags imageFlags = imageEntry->second.flags;
+ VkFormat imageFormat = imageEntry->second.format;
+ VkFormat ivciFormat = pCreateInfo->format;
+ VkImageAspectFlags aspectMask =
+ pCreateInfo->subresourceRange.aspectMask;
// Validate VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT state
if (imageFlags & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) {
- // Format MUST be compatible (in the same format compatibility class) as the format the image was created with
- if (vk_format_get_compatibility_class(imageFormat) != vk_format_get_compatibility_class(ivciFormat)) {
+ // Format MUST be compatible (in the same format compatibility
+ // class) as the format the image was created with
+ if (vk_format_get_compatibility_class(imageFormat) !=
+ vk_format_get_compatibility_class(ivciFormat)) {
std::stringstream ss;
- ss << "vkCreateImageView(): ImageView format " << string_VkFormat(ivciFormat) << " is not in the same format compatibility class as image (" <<
- (uint64_t)pCreateInfo->image << ") format " << string_VkFormat(imageFormat) << ". Images created with the VK_IMAGE_CREATE_MUTABLE_FORMAT BIT " <<
- "can support ImageViews with differing formats but they must be in the same compatibility class.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ ss << "vkCreateImageView(): ImageView format "
+ << string_VkFormat(ivciFormat)
+ << " is not in the same format compatibility class as image "
+ "(" << (uint64_t)pCreateInfo->image << ") format "
+ << string_VkFormat(imageFormat)
+ << ". Images created with the "
+ "VK_IMAGE_CREATE_MUTABLE_FORMAT BIT "
+ << "can support ImageViews with differing formats but they "
+ "must be in the same compatibility class.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
}
} else {
// Format MUST be IDENTICAL to the format the image was created with
if (imageFormat != ivciFormat) {
std::stringstream ss;
- ss << "vkCreateImageView() format " << string_VkFormat(ivciFormat) << " differs from image " << (uint64_t)pCreateInfo->image << " format " <<
- string_VkFormat(imageFormat) << ". Formats MUST be IDENTICAL unless VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image creation.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ ss << "vkCreateImageView() format "
+ << string_VkFormat(ivciFormat) << " differs from image "
+ << (uint64_t)pCreateInfo->image << " format "
+ << string_VkFormat(imageFormat)
+ << ". Formats MUST be IDENTICAL unless "
+ "VK_IMAGE_CREATE_MUTABLE_FORMAT BIT was set on image "
+ "creation.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
IMAGE_VIEW_CREATE_ERROR, "IMAGE", "%s", ss.str().c_str());
}
}
- // Validate correct image aspect bits for desired formats and format consistency
+ // Validate correct image aspect bits for desired formats and format
+ // consistency
if (vk_format_is_color(imageFormat)) {
- if ((aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
+ if ((aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) !=
+ VK_IMAGE_ASPECT_COLOR_BIT) {
std::stringstream ss;
- ss << "vkCreateImageView: Color image formats must have the VK_IMAGE_ASPECT_COLOR_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Color image formats must have the "
+ "VK_IMAGE_ASPECT_COLOR_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
if ((aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) != aspectMask) {
std::stringstream ss;
- ss << "vkCreateImageView: Color image formats must have ONLY the VK_IMAGE_ASPECT_COLOR_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Color image formats must have ONLY "
+ "the VK_IMAGE_ASPECT_COLOR_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
if (VK_FALSE == vk_format_is_color(ivciFormat)) {
std::stringstream ss;
- ss << "vkCreateImageView: The image view's format can differ from the parent image's format, but both must be "
- << "color formats. ImageFormat is " << string_VkFormat(imageFormat) << " ImageViewFormat is " << string_VkFormat(ivciFormat);
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_FORMAT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: The image view's format can differ "
+ "from the parent image's format, but both must be "
+ << "color formats. ImageFormat is "
+ << string_VkFormat(imageFormat) << " ImageViewFormat is "
+ << string_VkFormat(ivciFormat);
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_FORMAT, "IMAGE", "%s", ss.str().c_str());
}
- // TODO: Uncompressed formats are compatible if they occupy they same number of bits per pixel.
- // Compressed formats are compatible if the only difference between them is the numerical type of
- // the uncompressed pixels (e.g. signed vs. unsigned, or sRGB vs. UNORM encoding).
- } else if (vk_format_is_depth_and_stencil(imageFormat)) {
- if ((aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0) {
+ // TODO: Uncompressed formats are compatible if they occupy the
+ // same number of bits per pixel.
+ // Compressed formats are compatible if the only difference
+ // between them is the numerical type of
+ // the uncompressed pixels (e.g. signed vs. unsigned, or sRGB
+ // vs. UNORM encoding).
+ } else if (vk_format_is_depth_and_stencil(imageFormat)) {
+ if ((aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT)) == 0) {
std::stringstream ss;
- ss << "vkCreateImageView: Depth/stencil image formats must have at least one of VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Depth/stencil image formats must "
+ "have at least one of VK_IMAGE_ASPECT_DEPTH_BIT and "
+ "VK_IMAGE_ASPECT_STENCIL_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
- if ((aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != aspectMask) {
+ if ((aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT)) != aspectMask) {
std::stringstream ss;
- ss << "vkCreateImageView: Combination depth/stencil image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Combination depth/stencil image "
+ "formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT and "
+ "VK_IMAGE_ASPECT_STENCIL_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
- } else if (vk_format_is_depth_only(imageFormat)) {
- if ((aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) {
+ } else if (vk_format_is_depth_only(imageFormat)) {
+ if ((aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) !=
+ VK_IMAGE_ASPECT_DEPTH_BIT) {
std::stringstream ss;
- ss << "vkCreateImageView: Depth-only image formats must have the VK_IMAGE_ASPECT_DEPTH_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Depth-only image formats must have "
+ "the VK_IMAGE_ASPECT_DEPTH_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
if ((aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != aspectMask) {
std::stringstream ss;
- ss << "vkCreateImageView: Depth-only image formats can have only the VK_IMAGE_ASPECT_DEPTH_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Depth-only image formats can have "
+ "only the VK_IMAGE_ASPECT_DEPTH_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
- } else if (vk_format_is_stencil_only(imageFormat)) {
- if ((aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT) {
+ } else if (vk_format_is_stencil_only(imageFormat)) {
+ if ((aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) !=
+ VK_IMAGE_ASPECT_STENCIL_BIT) {
std::stringstream ss;
- ss << "vkCreateImageView: Stencil-only image formats must have the VK_IMAGE_ASPECT_STENCIL_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Stencil-only image formats must have "
+ "the VK_IMAGE_ASPECT_STENCIL_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
if ((aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != aspectMask) {
std::stringstream ss;
- ss << "vkCreateImageView: Stencil-only image formats can have only the VK_IMAGE_ASPECT_STENCIL_BIT set";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)pCreateInfo->image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCreateImageView: Stencil-only image formats can have "
+ "only the VK_IMAGE_ASPECT_STENCIL_BIT set";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ (uint64_t)pCreateInfo->image, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
}
}
@@ -645,74 +814,81 @@
return VK_ERROR_VALIDATION_FAILED_EXT;
}
- VkResult result = device_data->device_dispatch_table->CreateImageView(device, pCreateInfo, pAllocator, pView);
+ VkResult result = device_data->device_dispatch_table->CreateImageView(
+ device, pCreateInfo, pAllocator, pView);
return result;
}
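
The vkCreateImageView checks above boil down to two rules: the view format must be identical to the image's unless VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT was set (then it only has to share the format compatibility class), and the aspect mask must be a non-empty subset of the aspects the image's format actually has. A sketch of the aspect rule with illustrative helpers; the layer itself relies on vk_format_is_color() and related utilities:

    #include <vulkan/vulkan.h>

    // Aspects a view may legally request, by format class.
    static VkImageAspectFlags ExpectedAspects(bool isColor, bool hasDepth,
                                              bool hasStencil) {
        if (isColor)
            return VK_IMAGE_ASPECT_COLOR_BIT;
        VkImageAspectFlags aspects = 0;
        if (hasDepth)
            aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
        if (hasStencil)
            aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
        return aspects;
    }

    // A view's aspectMask must be a non-empty subset of what the format allows.
    static bool AspectMaskValid(VkImageAspectFlags requested,
                                VkImageAspectFlags allowed) {
        return requested != 0 && (requested & ~allowed) == 0;
    }
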
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue *pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange *pRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearColorValue *pColor, uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
// For each range, image aspect must be color only
for (uint32_t i = 0; i < rangeCount; i++) {
if (pRanges[i].aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
- char const str[] = "vkCmdClearColorImage aspectMasks for all subresource ranges must be set to VK_IMAGE_ASPECT_COLOR_BIT";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
+ char const str[] = "vkCmdClearColorImage aspectMasks for all "
+ "subresource ranges must be set to "
+ "VK_IMAGE_ASPECT_COLOR_BIT";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->CmdClearColorImage(commandBuffer, image, imageLayout,
- pColor, rangeCount, pRanges);
+ device_data->device_dispatch_table->CmdClearColorImage(
+ commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue *pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange *pRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearDepthStencilValue *pDepthStencil,
+ uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
// For each range, Image aspect must be depth or stencil or both
for (uint32_t i = 0; i < rangeCount; i++) {
- if (((pRanges[i].aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) &&
- ((pRanges[i].aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT))
- {
- char const str[] = "vkCmdClearDepthStencilImage aspectMasks for all subresource ranges must be "
- "set to VK_IMAGE_ASPECT_DEPTH_BIT and/or VK_IMAGE_ASPECT_STENCIL_BIT";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
+ if (((pRanges[i].aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) !=
+ VK_IMAGE_ASPECT_DEPTH_BIT) &&
+ ((pRanges[i].aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) !=
+ VK_IMAGE_ASPECT_STENCIL_BIT)) {
+ char const str[] = "vkCmdClearDepthStencilImage aspectMasks for "
+ "all subresource ranges must be "
+ "set to VK_IMAGE_ASPECT_DEPTH_BIT and/or "
+ "VK_IMAGE_ASPECT_STENCIL_BIT";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->CmdClearDepthStencilImage(commandBuffer,
- image, imageLayout, pDepthStencil, rangeCount, pRanges);
+ device_data->device_dispatch_table->CmdClearDepthStencilImage(
+ commandBuffer, image, imageLayout, pDepthStencil, rangeCount,
+ pRanges);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy *pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageCopy *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
auto srcImageEntry = device_data->imageMap.find(srcImage);
auto dstImageEntry = device_data->imageMap.find(dstImage);
@@ -721,201 +897,262 @@
// For each region, src aspect mask must match dest aspect mask
// For each region, color aspects cannot be mixed with depth/stencil aspects
for (uint32_t i = 0; i < regionCount; i++) {
- if(pRegions[i].srcSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdCopyImage: number of layers in source subresource is zero";
+ if (pRegions[i].srcSubresource.layerCount == 0) {
+ char const str[] = "vkCmdCopyImage: number of layers in source "
+ "subresource is zero";
// TODO: Verify against Valid Use section of spec
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if(pRegions[i].dstSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdCopyImage: number of layers in destination subresource is zero";
+ if (pRegions[i].dstSubresource.layerCount == 0) {
+ char const str[] = "vkCmdCopyImage: number of layers in "
+ "destination subresource is zero";
// TODO: Verify against Valid Use section of spec
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if(pRegions[i].srcSubresource.layerCount != pRegions[i].dstSubresource.layerCount)
- {
- char const str[] = "vkCmdCopyImage: number of layers in source and destination subresources must match";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].srcSubresource.layerCount !=
+ pRegions[i].dstSubresource.layerCount) {
+ char const str[] = "vkCmdCopyImage: number of layers in source and "
+ "destination subresources must match";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if (pRegions[i].srcSubresource.aspectMask != pRegions[i].dstSubresource.aspectMask) {
- char const str[] = "vkCmdCopyImage: Src and dest aspectMasks for each region must match";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].srcSubresource.aspectMask !=
+ pRegions[i].dstSubresource.aspectMask) {
+ char const str[] = "vkCmdCopyImage: Src and dest aspectMasks for "
+ "each region must match";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if ((pRegions[i].srcSubresource.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) &&
- (pRegions[i].srcSubresource.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
- char const str[] = "vkCmdCopyImage aspectMask cannot specify both COLOR and DEPTH/STENCIL aspects";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
+ if ((pRegions[i].srcSubresource.aspectMask &
+ VK_IMAGE_ASPECT_COLOR_BIT) &&
+ (pRegions[i].srcSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))) {
+ char const str[] = "vkCmdCopyImage aspectMask cannot specify both "
+ "COLOR and DEPTH/STENCIL aspects";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
}
}
- if ((srcImageEntry != device_data->imageMap.end())
- && (dstImageEntry != device_data->imageMap.end())) {
- if (srcImageEntry->second.imageType != dstImageEntry->second.imageType) {
- char const str[] = "vkCmdCopyImage called with unmatched source and dest image types.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_TYPE, "IMAGE", str);
+ if ((srcImageEntry != device_data->imageMap.end()) &&
+ (dstImageEntry != device_data->imageMap.end())) {
+ if (srcImageEntry->second.imageType !=
+ dstImageEntry->second.imageType) {
+ char const str[] = "vkCmdCopyImage called with unmatched source "
+ "and dest image types.";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_TYPE, "IMAGE", str);
}
// Check that format is same size or exact stencil/depth
if (is_depth_format(srcImageEntry->second.format)) {
if (srcImageEntry->second.format != dstImageEntry->second.format) {
- char const str[] = "vkCmdCopyImage called with unmatched source and dest image depth/stencil formats.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
+ char const str[] = "vkCmdCopyImage called with unmatched "
+ "source and dest image depth/stencil "
+ "formats.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
}
} else {
size_t srcSize = vk_format_get_size(srcImageEntry->second.format);
size_t destSize = vk_format_get_size(dstImageEntry->second.format);
if (srcSize != destSize) {
- char const str[] = "vkCmdCopyImage called with unmatched source and dest image format sizes.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
+ char const str[] = "vkCmdCopyImage called with unmatched "
+ "source and dest image format sizes.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
}
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->CmdCopyImage(commandBuffer, srcImage,
- srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ device_data->device_dispatch_table->CmdCopyImage(
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions);
}
}
-VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects)
-{
+VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearAttachments(VkCommandBuffer commandBuffer,
+ uint32_t attachmentCount,
+ const VkClearAttachment *pAttachments,
+ uint32_t rectCount, const VkClearRect *pRects) {
VkBool32 skipCall = VK_FALSE;
VkImageAspectFlags aspectMask;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
for (uint32_t i = 0; i < attachmentCount; i++) {
aspectMask = pAttachments[i].aspectMask;
if (aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) {
if (aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
- // VK_IMAGE_ASPECT_COLOR_BIT is not the only bit set for this attachment
- char const str[] = "vkCmdClearAttachments aspectMask [%d] must set only VK_IMAGE_ASPECT_COLOR_BIT of a color attachment.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str, i);
+ // VK_IMAGE_ASPECT_COLOR_BIT is not the only bit set for this
+ // attachment
+ char const str[] = "vkCmdClearAttachments aspectMask [%d] must "
+ "set only VK_IMAGE_ASPECT_COLOR_BIT of a "
+ "color attachment.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str, i);
}
} else {
// Image aspect must be depth or stencil or both
- if (((aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) &&
- ((aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT))
- {
- char const str[] = "vkCmdClearAttachments aspectMask [%d] must be set to VK_IMAGE_ASPECT_DEPTH_BIT and/or VK_IMAGE_ASPECT_STENCIL_BIT";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str, i);
+ if (((aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) !=
+ VK_IMAGE_ASPECT_DEPTH_BIT) &&
+ ((aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) !=
+ VK_IMAGE_ASPECT_STENCIL_BIT)) {
+ char const str[] = "vkCmdClearAttachments aspectMask [%d] must "
+ "be set to VK_IMAGE_ASPECT_DEPTH_BIT and/or "
+ "VK_IMAGE_ASPECT_STENCIL_BIT";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str, i);
}
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->CmdClearAttachments(commandBuffer,
- attachmentCount, pAttachments, rectCount, pRects);
+ device_data->device_dispatch_table->CmdClearAttachments(
+ commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy *pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- // For each region, the number of layers in the image subresource should not be zero
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ // For each region, the number of layers in the image subresource should not
+ // be zero
// Image aspect must be ONE OF color, depth, stencil
for (uint32_t i = 0; i < regionCount; i++) {
- if(pRegions[i].imageSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdCopyImageToBuffer: number of layers in image subresource is zero";
- // TODO: Verify against Valid Use section of spec, if this case yields undefined results, then it's an error
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].imageSubresource.layerCount == 0) {
+ char const str[] = "vkCmdCopyImageToBuffer: number of layers in "
+ "image subresource is zero";
+ // TODO: Verify against Valid Use section of spec, if this case
+ // yields undefined results, then it's an error
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
VkImageAspectFlags aspectMask = pRegions[i].imageSubresource.aspectMask;
if ((aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) &&
(aspectMask != VK_IMAGE_ASPECT_DEPTH_BIT) &&
(aspectMask != VK_IMAGE_ASPECT_STENCIL_BIT)) {
- char const str[] = "vkCmdCopyImageToBuffer: aspectMasks for each region must specify only COLOR or DEPTH or STENCIL";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
+ char const str[] = "vkCmdCopyImageToBuffer: aspectMasks for each "
+ "region must specify only COLOR or DEPTH or "
+ "STENCIL";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->CmdCopyImageToBuffer(commandBuffer,
- srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+ device_data->device_dispatch_table->CmdCopyImageToBuffer(
+ commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount,
+ pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy *pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkImage dstImage, VkImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- // For each region, the number of layers in the image subresource should not be zero
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ // For each region, the number of layers in the image subresource should not
+ // be zero
// Image aspect must be ONE OF color, depth, stencil
for (uint32_t i = 0; i < regionCount; i++) {
- if(pRegions[i].imageSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdCopyBufferToImage: number of layers in image subresource is zero";
- // TODO: Verify against Valid Use section of spec, if this case yields undefined results, then it's an error
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].imageSubresource.layerCount == 0) {
+ char const str[] = "vkCmdCopyBufferToImage: number of layers in "
+ "image subresource is zero";
+ // TODO: Verify against Valid Use section of spec, if this case
+ // yields undefined results, then it's an error
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
VkImageAspectFlags aspectMask = pRegions[i].imageSubresource.aspectMask;
if ((aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) &&
(aspectMask != VK_IMAGE_ASPECT_DEPTH_BIT) &&
(aspectMask != VK_IMAGE_ASPECT_STENCIL_BIT)) {
- char const str[] = "vkCmdCopyBufferToImage: aspectMasks for each region must specify only COLOR or DEPTH or STENCIL";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
+ char const str[] = "vkCmdCopyBufferToImage: aspectMasks for each "
+ "region must specify only COLOR or DEPTH or "
+ "STENCIL";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->CmdCopyBufferToImage(commandBuffer,
- srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+ device_data->device_dispatch_table->CmdCopyBufferToImage(
+ commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount,
+ pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit *pRegions,
- VkFilter filter)
-{
- VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageBlit *pRegions, VkFilter filter) {
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- auto srcImageEntry = device_data->imageMap.find(srcImage);
+ auto srcImageEntry = device_data->imageMap.find(srcImage);
auto dstImageEntry = device_data->imageMap.find(dstImage);
- if ((srcImageEntry != device_data->imageMap.end()) &&
+ if ((srcImageEntry != device_data->imageMap.end()) &&
(dstImageEntry != device_data->imageMap.end())) {
VkFormat srcFormat = srcImageEntry->second.format;
@@ -925,11 +1162,17 @@
if ((vk_format_is_sint(srcFormat) && !vk_format_is_sint(dstFormat)) ||
(vk_format_is_uint(srcFormat) && !vk_format_is_uint(dstFormat))) {
std::stringstream ss;
- ss << "vkCmdBlitImage: If one of srcImage and dstImage images has signed/unsigned integer format, "
- << "the other one must also have signed/unsigned integer format. "
- << "Source format is " << string_VkFormat(srcFormat) << " Destination format is " << string_VkFormat(dstFormat);
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_FORMAT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCmdBlitImage: If one of srcImage and dstImage images has "
+ "signed/unsigned integer format, "
+ << "the other one must also have signed/unsigned integer "
+ "format. "
+ << "Source format is " << string_VkFormat(srcFormat)
+ << " Destination format is " << string_VkFormat(dstFormat);
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_FORMAT,
+ "IMAGE", "%s", ss.str().c_str());
}
// Validate aspect bits and formats for depth/stencil images
@@ -937,70 +1180,118 @@
vk_format_is_depth_or_stencil(dstFormat)) {
if (srcFormat != dstFormat) {
std::stringstream ss;
- ss << "vkCmdBlitImage: If one of srcImage and dstImage images has a format of depth, stencil or depth "
- << "stencil, the other one must have exactly the same format. "
- << "Source format is " << string_VkFormat(srcFormat) << " Destination format is " << string_VkFormat(dstFormat);
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_FORMAT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCmdBlitImage: If one of srcImage and dstImage images "
+ "has a format of depth, stencil or depth "
+ << "stencil, the other one must have exactly the same "
+ "format. "
+ << "Source format is " << string_VkFormat(srcFormat)
+ << " Destination format is " << string_VkFormat(dstFormat);
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_FORMAT,
+ "IMAGE", "%s", ss.str().c_str());
}
for (uint32_t i = 0; i < regionCount; i++) {
- if(pRegions[i].srcSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdBlitImage: number of layers in source subresource is zero";
- // TODO: Verify against Valid Use section of spec, if this case yields undefined results, then it's an error
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].srcSubresource.layerCount == 0) {
+ char const str[] = "vkCmdBlitImage: number of layers in "
+ "source subresource is zero";
+ // TODO: Verify against Valid Use section of spec, if this
+ // case yields undefined results, then it's an error
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if(pRegions[i].dstSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdBlitImage: number of layers in destination subresource is zero";
- // TODO: Verify against Valid Use section of spec, if this case yields undefined results, then it's an error
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].dstSubresource.layerCount == 0) {
+ char const str[] = "vkCmdBlitImage: number of layers in "
+ "destination subresource is zero";
+ // TODO: Verify against Valid Use section of spec, if this
+ // case yields undefined results, then it's an error
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if(pRegions[i].srcSubresource.layerCount != pRegions[i].dstSubresource.layerCount)
- {
- char const str[] = "vkCmdBlitImage: number of layers in source and destination subresources must match";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].srcSubresource.layerCount !=
+ pRegions[i].dstSubresource.layerCount) {
+ char const str[] = "vkCmdBlitImage: number of layers in "
+ "source and destination subresources "
+ "must match";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- VkImageAspectFlags srcAspect = pRegions[i].srcSubresource.aspectMask;
- VkImageAspectFlags dstAspect = pRegions[i].dstSubresource.aspectMask;
+ VkImageAspectFlags srcAspect =
+ pRegions[i].srcSubresource.aspectMask;
+ VkImageAspectFlags dstAspect =
+ pRegions[i].dstSubresource.aspectMask;
if (srcAspect != dstAspect) {
std::stringstream ss;
- ss << "vkCmdBlitImage: Image aspects of depth/stencil images should match";
- // TODO: Verify against Valid Use section of spec, if this case yields undefined results, then it's an error
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCmdBlitImage: Image aspects of depth/stencil "
+ "images should match";
+ // TODO: Verify against Valid Use section of spec, if this
+ // case yields undefined results, then it's an error
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
if (vk_format_is_depth_and_stencil(srcFormat)) {
- if ((srcAspect != VK_IMAGE_ASPECT_DEPTH_BIT) && (srcAspect != VK_IMAGE_ASPECT_STENCIL_BIT)) {
+ if ((srcAspect != VK_IMAGE_ASPECT_DEPTH_BIT) &&
+ (srcAspect != VK_IMAGE_ASPECT_STENCIL_BIT)) {
std::stringstream ss;
- ss << "vkCmdBlitImage: Combination depth/stencil image formats must have only one of VK_IMAGE_ASPECT_DEPTH_BIT "
- << "and VK_IMAGE_ASPECT_STENCIL_BIT set in srcImage and dstImage";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCmdBlitImage: Combination depth/stencil image "
+ "formats must have only one of "
+ "VK_IMAGE_ASPECT_DEPTH_BIT "
+ << "and VK_IMAGE_ASPECT_STENCIL_BIT set in srcImage "
+ "and dstImage";
+ skipCall |= log_msg(
+ device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
} else if (vk_format_is_stencil_only(srcFormat)) {
if (srcAspect != VK_IMAGE_ASPECT_STENCIL_BIT) {
std::stringstream ss;
- ss << "vkCmdBlitImage: Stencil-only image formats must have only the VK_IMAGE_ASPECT_STENCIL_BIT "
+ ss << "vkCmdBlitImage: Stencil-only image formats must "
+ "have only the VK_IMAGE_ASPECT_STENCIL_BIT "
<< "set in both the srcImage and dstImage";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ skipCall |= log_msg(
+ device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
} else if (vk_format_is_depth_only(srcFormat)) {
if (srcAspect != VK_IMAGE_ASPECT_DEPTH_BIT) {
std::stringstream ss;
- ss << "vkCmdBlitImage: Depth-only image formats must have only the VK_IMAGE_ASPECT_DEPTH "
+ ss << "vkCmdBlitImage: Depth-only image formats must "
+ "have only the VK_IMAGE_ASPECT_DEPTH "
<< "set in both the srcImage and dstImage";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ skipCall |= log_msg(
+ device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
}
}
@@ -1011,140 +1302,173 @@
vk_format_is_int(srcFormat)) {
if (filter != VK_FILTER_NEAREST) {
std::stringstream ss;
- ss << "vkCmdBlitImage: If the format of srcImage is a depth, stencil, depth stencil or integer-based format "
+ ss << "vkCmdBlitImage: If the format of srcImage is a depth, "
+ "stencil, depth stencil or integer-based format "
<< "then filter must be VK_FILTER_NEAREST.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_FILTER, "IMAGE", "%s", ss.str().c_str());
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_FILTER,
+ "IMAGE", "%s", ss.str().c_str());
}
}
}
- device_data->device_dispatch_table->CmdBlitImage(commandBuffer, srcImage,
- srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+ device_data->device_dispatch_table->CmdBlitImage(
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions, filter);
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier *pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier *pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier *pImageMemoryBarriers)
-{
+ VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier *pImageMemoryBarriers) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i)
- {
- VkImageMemoryBarrier const*const barrier = (VkImageMemoryBarrier const*const) &pImageMemoryBarriers[i];
- if (barrier->sType == VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER)
- {
- if (barrier->subresourceRange.layerCount == 0)
- {
+ for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
+ VkImageMemoryBarrier const *const barrier =
+ (VkImageMemoryBarrier const *const) & pImageMemoryBarriers[i];
+ if (barrier->sType == VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER) {
+ if (barrier->subresourceRange.layerCount == 0) {
std::stringstream ss;
- ss << "vkCmdPipelineBarrier called with 0 in ppMemoryBarriers[" << i << "]->subresourceRange.layerCount.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0,
- 0, __LINE__, IMAGE_INVALID_IMAGE_RESOURCE, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkCmdPipelineBarrier called with 0 in ppMemoryBarriers["
+ << i << "]->subresourceRange.layerCount.";
+ skipCall |= log_msg(device_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ IMAGE_INVALID_IMAGE_RESOURCE, "IMAGE", "%s",
+ ss.str().c_str());
}
}
}
- if (skipCall)
- {
+ if (skipCall) {
return;
}
- device_data->device_dispatch_table->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
- memoryBarrierCount, pMemoryBarriers,
- bufferMemoryBarrierCount, pBufferMemoryBarriers,
- imageMemoryBarrierCount, pImageMemoryBarriers);
+ device_data->device_dispatch_table->CmdPipelineBarrier(
+ commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve *pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageResolve *pRegions) {
VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
auto srcImageEntry = device_data->imageMap.find(srcImage);
auto dstImageEntry = device_data->imageMap.find(dstImage);
- // For each region, the number of layers in the image subresource should not be zero
+ // For each region, the number of layers in the image subresource should not
+ // be zero
// For each region, src and dest image aspect must be color only
for (uint32_t i = 0; i < regionCount; i++) {
- if(pRegions[i].srcSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdResolveImage: number of layers in source subresource is zero";
- // TODO: Verify against Valid Use section of spec. Generally if something yield an undefined result, it's invalid/error
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ if (pRegions[i].srcSubresource.layerCount == 0) {
+ char const str[] = "vkCmdResolveImage: number of layers in source "
+ "subresource is zero";
+ // TODO: Verify against Valid Use section of spec. Generally if
+            // something yields an undefined result, it's invalid/an error
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if(pRegions[i].dstSubresource.layerCount == 0)
- {
- char const str[] = "vkCmdResolveImage: number of layers in destination subresource is zero";
+ if (pRegions[i].dstSubresource.layerCount == 0) {
+ char const str[] = "vkCmdResolveImage: number of layers in "
+ "destination subresource is zero";
- // TODO: Verify against Valid Use section of spec. Generally if something yield an undefined result, it's invalid/error
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
+ // TODO: Verify against Valid Use section of spec. Generally if
+            // something yields an undefined result, it's invalid/an error
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_ASPECT, "IMAGE", str);
}
- if ((pRegions[i].srcSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) ||
- (pRegions[i].dstSubresource.aspectMask != VK_IMAGE_ASPECT_COLOR_BIT)) {
- char const str[] = "vkCmdResolveImage: src and dest aspectMasks for each region must specify only VK_IMAGE_ASPECT_COLOR_BIT";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
+ if ((pRegions[i].srcSubresource.aspectMask !=
+ VK_IMAGE_ASPECT_COLOR_BIT) ||
+ (pRegions[i].dstSubresource.aspectMask !=
+ VK_IMAGE_ASPECT_COLOR_BIT)) {
+ char const str[] = "vkCmdResolveImage: src and dest aspectMasks "
+ "for each region must specify only "
+ "VK_IMAGE_ASPECT_COLOR_BIT";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
}
}
- if ((srcImageEntry != device_data->imageMap.end()) &&
+ if ((srcImageEntry != device_data->imageMap.end()) &&
(dstImageEntry != device_data->imageMap.end())) {
if (srcImageEntry->second.format != dstImageEntry->second.format) {
- char const str[] = "vkCmdResolveImage called with unmatched source and dest formats.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
+ char const str[] = "vkCmdResolveImage called with unmatched source "
+ "and dest formats.";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_FORMAT, "IMAGE", str);
}
- if (srcImageEntry->second.imageType != dstImageEntry->second.imageType) {
- char const str[] = "vkCmdResolveImage called with unmatched source and dest image types.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_MISMATCHED_IMAGE_TYPE, "IMAGE", str);
+ if (srcImageEntry->second.imageType !=
+ dstImageEntry->second.imageType) {
+ char const str[] = "vkCmdResolveImage called with unmatched source "
+ "and dest image types.";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_MISMATCHED_IMAGE_TYPE, "IMAGE", str);
}
if (srcImageEntry->second.samples == VK_SAMPLE_COUNT_1_BIT) {
- char const str[] = "vkCmdResolveImage called with source sample count less than 2.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_RESOLVE_SAMPLES, "IMAGE", str);
+ char const str[] = "vkCmdResolveImage called with source sample "
+ "count less than 2.";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_RESOLVE_SAMPLES, "IMAGE", str);
}
if (dstImageEntry->second.samples != VK_SAMPLE_COUNT_1_BIT) {
- char const str[] = "vkCmdResolveImage called with dest sample count greater than 1.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- (uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_RESOLVE_SAMPLES, "IMAGE", str);
+ char const str[] = "vkCmdResolveImage called with dest sample "
+ "count greater than 1.";
+ skipCall |=
+ log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ IMAGE_INVALID_RESOLVE_SAMPLES, "IMAGE", str);
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->CmdResolveImage(commandBuffer, srcImage,
- srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ device_data->device_dispatch_table->CmdResolveImage(
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource *pSubresource,
- VkSubresourceLayout *pLayout)
-{
- VkBool32 skipCall = VK_FALSE;
- layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkFormat format;
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetImageSubresourceLayout(VkDevice device, VkImage image,
+ const VkImageSubresource *pSubresource,
+ VkSubresourceLayout *pLayout) {
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkFormat format;
auto imageEntry = device_data->imageMap.find(image);
@@ -1154,75 +1478,89 @@
if (vk_format_is_color(format)) {
if (pSubresource->aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
std::stringstream ss;
- ss << "vkGetImageSubresourceLayout: For color formats, the aspectMask field of VkImageSubresource must be VK_IMAGE_ASPECT_COLOR.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkGetImageSubresourceLayout: For color formats, the "
+ "aspectMask field of VkImageSubresource must be "
+ "VK_IMAGE_ASPECT_COLOR.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)image,
+ __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
} else if (vk_format_is_depth_or_stencil(format)) {
if ((pSubresource->aspectMask != VK_IMAGE_ASPECT_DEPTH_BIT) &&
(pSubresource->aspectMask != VK_IMAGE_ASPECT_STENCIL_BIT)) {
std::stringstream ss;
- ss << "vkGetImageSubresourceLayout: For depth/stencil formats, the aspectMask selects either the depth or stencil image aspectMask.";
- skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)image, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
+ ss << "vkGetImageSubresourceLayout: For depth/stencil formats, "
+ "the aspectMask selects either the depth or stencil "
+ "image aspectMask.";
+ skipCall |= log_msg(
+ device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)image,
+ __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", "%s",
+ ss.str().c_str());
}
}
}
if (VK_FALSE == skipCall) {
- device_data->device_dispatch_table->GetImageSubresourceLayout(device,
- image, pSubresource, pLayout);
+ device_data->device_dispatch_table->GetImageSubresourceLayout(
+ device, image, pSubresource, pLayout);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties)
-{
- layer_data *phy_dev_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- phy_dev_data->instance_dispatch_table->GetPhysicalDeviceProperties(physicalDevice, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceProperties *pProperties) {
+ layer_data *phy_dev_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ phy_dev_data->instance_dispatch_table->GetPhysicalDeviceProperties(
+ physicalDevice, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetDeviceProcAddr(VkDevice device, const char *funcName) {
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkDestroyDevice"))
- return (PFN_vkVoidFunction) vkDestroyDevice;
+ return (PFN_vkVoidFunction)vkDestroyDevice;
if (!strcmp(funcName, "vkCreateImage"))
- return (PFN_vkVoidFunction) vkCreateImage;
+ return (PFN_vkVoidFunction)vkCreateImage;
if (!strcmp(funcName, "vkDestroyImage"))
- return (PFN_vkVoidFunction) vkDestroyImage;
+ return (PFN_vkVoidFunction)vkDestroyImage;
if (!strcmp(funcName, "vkCreateImageView"))
- return (PFN_vkVoidFunction) vkCreateImageView;
+ return (PFN_vkVoidFunction)vkCreateImageView;
if (!strcmp(funcName, "vkCreateRenderPass"))
- return (PFN_vkVoidFunction) vkCreateRenderPass;
+ return (PFN_vkVoidFunction)vkCreateRenderPass;
if (!strcmp(funcName, "vkCmdClearColorImage"))
- return (PFN_vkVoidFunction) vkCmdClearColorImage;
+ return (PFN_vkVoidFunction)vkCmdClearColorImage;
if (!strcmp(funcName, "vkCmdClearDepthStencilImage"))
- return (PFN_vkVoidFunction) vkCmdClearDepthStencilImage;
+ return (PFN_vkVoidFunction)vkCmdClearDepthStencilImage;
if (!strcmp(funcName, "vkCmdClearAttachments"))
- return (PFN_vkVoidFunction) vkCmdClearAttachments;
+ return (PFN_vkVoidFunction)vkCmdClearAttachments;
if (!strcmp(funcName, "vkCmdCopyImage"))
- return (PFN_vkVoidFunction) vkCmdCopyImage;
+ return (PFN_vkVoidFunction)vkCmdCopyImage;
if (!strcmp(funcName, "vkCmdCopyImageToBuffer"))
- return (PFN_vkVoidFunction) vkCmdCopyImageToBuffer;
+ return (PFN_vkVoidFunction)vkCmdCopyImageToBuffer;
if (!strcmp(funcName, "vkCmdCopyBufferToImage"))
- return (PFN_vkVoidFunction) vkCmdCopyBufferToImage;
+ return (PFN_vkVoidFunction)vkCmdCopyBufferToImage;
if (!strcmp(funcName, "vkCmdBlitImage"))
- return (PFN_vkVoidFunction) vkCmdBlitImage;
+ return (PFN_vkVoidFunction)vkCmdBlitImage;
if (!strcmp(funcName, "vkCmdPipelineBarrier"))
- return (PFN_vkVoidFunction) vkCmdPipelineBarrier;
+ return (PFN_vkVoidFunction)vkCmdPipelineBarrier;
if (!strcmp(funcName, "vkCmdResolveImage"))
- return (PFN_vkVoidFunction) vkCmdResolveImage;
+ return (PFN_vkVoidFunction)vkCmdResolveImage;
if (!strcmp(funcName, "vkGetImageSubresourceLayout"))
- return (PFN_vkVoidFunction) vkGetImageSubresourceLayout;
+ return (PFN_vkVoidFunction)vkGetImageSubresourceLayout;
if (device == NULL) {
return NULL;
}
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkLayerDispatchTable* pTable = my_data->device_dispatch_table;
+ VkLayerDispatchTable *pTable = my_data->device_dispatch_table;
{
if (pTable->GetDeviceProcAddr == NULL)
return NULL;
@@ -1230,38 +1568,40 @@
}
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
if (!strcmp(funcName, "vkGetInstanceProcAddr"))
- return (PFN_vkVoidFunction) vkGetInstanceProcAddr;
+ return (PFN_vkVoidFunction)vkGetInstanceProcAddr;
if (!strcmp(funcName, "vkCreateInstance"))
- return (PFN_vkVoidFunction) vkCreateInstance;
+ return (PFN_vkVoidFunction)vkCreateInstance;
if (!strcmp(funcName, "vkDestroyInstance"))
- return (PFN_vkVoidFunction) vkDestroyInstance;
+ return (PFN_vkVoidFunction)vkDestroyInstance;
if (!strcmp(funcName, "vkCreateDevice"))
- return (PFN_vkVoidFunction) vkCreateDevice;
+ return (PFN_vkVoidFunction)vkCreateDevice;
if (!strcmp(funcName, "vkEnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceLayerProperties;
if (!strcmp(funcName, "vkEnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceExtensionProperties;
if (!strcmp(funcName, "vkEnumerateDeviceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceLayerProperties;
if (!strcmp(funcName, "vkEnumerateDeviceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceExtensionProperties;
if (!strcmp(funcName, "vkGetPhysicalDeviceProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceProperties;
if (instance == NULL) {
return NULL;
}
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- PFN_vkVoidFunction fptr = debug_report_get_instance_proc_addr(my_data->report_data, funcName);
- if(fptr)
+ PFN_vkVoidFunction fptr =
+ debug_report_get_instance_proc_addr(my_data->report_data, funcName);
+ if (fptr)
return fptr;
- VkLayerInstanceDispatchTable* pTable = my_data->instance_dispatch_table;
+ VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
if (pTable->GetInstanceProcAddr == NULL)
return NULL;
return pTable->GetInstanceProcAddr(instance, funcName);
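All of the intercepts reformatted above share one control-flow shape: each check ORs its result into skipCall via log_msg(), and the call is forwarded through the device dispatch table only while skipCall is still VK_FALSE. The standalone sketch below models just that shape; toy_log_msg, toy_forward_CmdCopyImage and the trimmed parameters are illustrative stand-ins, not part of the layer framework.

// Minimal, self-contained model of the skipCall pattern (illustrative only).
#include <cstdint>
#include <cstdio>

typedef std::uint32_t VkBool32;     // local stand-ins, not from vulkan.h
static const VkBool32 VK_FALSE = 0;
static const VkBool32 VK_TRUE = 1;

// Stand-in for log_msg(): report the problem and say whether to skip the call.
static VkBool32 toy_log_msg(const char *layer_prefix, const char *msg) {
  std::fprintf(stderr, "%s: %s\n", layer_prefix, msg);
  return VK_TRUE;
}

// Stand-in for device_dispatch_table->CmdCopyImage(...).
static void toy_forward_CmdCopyImage() { std::puts("forwarded to next layer"); }

// Same shape as the reformatted vkCmdCopyImage: validate every region, OR the
// results into skipCall, and only call down when nothing was flagged.
static void sketch_CmdCopyImage(std::uint32_t regionCount,
                                const std::uint32_t *layerCounts) {
  VkBool32 skipCall = VK_FALSE;
  for (std::uint32_t i = 0; i < regionCount; i++) {
    if (layerCounts[i] == 0) {
      skipCall |= toy_log_msg(
          "IMAGE", "vkCmdCopyImage: number of layers in subresource is zero");
    }
  }
  if (VK_FALSE == skipCall) {
    toy_forward_CmdCopyImage();
  }
}

int main() {
  const std::uint32_t counts[] = {1, 0, 2};
  sketch_CmdCopyImage(3, counts); // region 1 trips the check, nothing forwarded
  return 0;
}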
diff --git a/layers/image.h b/layers/image.h
index dbc2d30..e3b2530 100644
--- a/layers/image.h
+++ b/layers/image.h
@@ -37,43 +37,53 @@
#include "vk_layer_logging.h"
// Image ERROR codes
-typedef enum _IMAGE_ERROR
-{
- IMAGE_NONE, // Used for INFO & other non-error messages
- IMAGE_FORMAT_UNSUPPORTED, // Request to create Image or RenderPass with a format that is not supported
- IMAGE_RENDERPASS_INVALID_ATTACHMENT, // Invalid image layouts and/or load/storeOps for an attachment when creating RenderPass
- IMAGE_RENDERPASS_INVALID_DS_ATTACHMENT, // If no depth attachment for a RenderPass, verify that subpass DS attachment is set to UNUSED
- IMAGE_INVALID_IMAGE_ASPECT, // Image aspect mask bits are invalid for this API call
- IMAGE_MISMATCHED_IMAGE_ASPECT, // Image aspect masks for source and dest images do not match
- IMAGE_VIEW_CREATE_ERROR, // Error occurred trying to create Image View
- IMAGE_MISMATCHED_IMAGE_TYPE, // Image types for source and dest images do not match
- IMAGE_MISMATCHED_IMAGE_FORMAT, // Image formats for source and dest images do not match
- IMAGE_INVALID_RESOLVE_SAMPLES, // Image resolve source samples less than two or dest samples greater than one
- IMAGE_INVALID_FORMAT, // Operation specifies an invalid format, or there is a format mismatch
- IMAGE_INVALID_FILTER, // Operation specifies an invalid filter setting
- IMAGE_INVALID_IMAGE_RESOURCE, // Image resource/subresource called with invalid setting
- IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, // Device limits for this format have been exceeded
+typedef enum _IMAGE_ERROR {
+ IMAGE_NONE, // Used for INFO & other non-error messages
+ IMAGE_FORMAT_UNSUPPORTED, // Request to create Image or RenderPass with a
+ // format that is not supported
+ IMAGE_RENDERPASS_INVALID_ATTACHMENT, // Invalid image layouts and/or
+ // load/storeOps for an attachment when
+ // creating RenderPass
+ IMAGE_RENDERPASS_INVALID_DS_ATTACHMENT, // If no depth attachment for a
+ // RenderPass, verify that subpass
+ // DS attachment is set to UNUSED
+ IMAGE_INVALID_IMAGE_ASPECT, // Image aspect mask bits are invalid for this
+ // API call
+ IMAGE_MISMATCHED_IMAGE_ASPECT, // Image aspect masks for source and dest
+ // images do not match
+ IMAGE_VIEW_CREATE_ERROR, // Error occurred trying to create Image View
+ IMAGE_MISMATCHED_IMAGE_TYPE, // Image types for source and dest images do
+ // not match
+ IMAGE_MISMATCHED_IMAGE_FORMAT, // Image formats for source and dest images
+ // do not match
+ IMAGE_INVALID_RESOLVE_SAMPLES, // Image resolve source samples less than two
+ // or dest samples greater than one
+ IMAGE_INVALID_FORMAT, // Operation specifies an invalid format, or there is
+ // a format mismatch
+ IMAGE_INVALID_FILTER, // Operation specifies an invalid filter setting
+ IMAGE_INVALID_IMAGE_RESOURCE, // Image resource/subresource called with
+ // invalid setting
+ IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, // Device limits for this format have
+ // been exceeded
} IMAGE_ERROR;
-typedef struct _IMAGE_STATE
-{
- uint32_t mipLevels;
- uint32_t arraySize;
- VkFormat format;
+typedef struct _IMAGE_STATE {
+ uint32_t mipLevels;
+ uint32_t arraySize;
+ VkFormat format;
VkSampleCountFlagBits samples;
- VkImageType imageType;
- VkExtent3D extent;
- VkImageCreateFlags flags;
- _IMAGE_STATE():mipLevels(0), arraySize(0), format(VK_FORMAT_UNDEFINED), samples(VK_SAMPLE_COUNT_1_BIT), imageType(VK_IMAGE_TYPE_RANGE_SIZE), extent{}, flags(0) {};
- _IMAGE_STATE(const VkImageCreateInfo* pCreateInfo):
- mipLevels(pCreateInfo->mipLevels),
- arraySize(pCreateInfo->arrayLayers),
- format(pCreateInfo->format),
- samples(pCreateInfo->samples),
- imageType(pCreateInfo->imageType),
- extent(pCreateInfo->extent),
- flags(pCreateInfo->flags)
- {};
+ VkImageType imageType;
+ VkExtent3D extent;
+ VkImageCreateFlags flags;
+ _IMAGE_STATE()
+ : mipLevels(0), arraySize(0), format(VK_FORMAT_UNDEFINED),
+ samples(VK_SAMPLE_COUNT_1_BIT), imageType(VK_IMAGE_TYPE_RANGE_SIZE),
+ extent{}, flags(0){};
+ _IMAGE_STATE(const VkImageCreateInfo *pCreateInfo)
+ : mipLevels(pCreateInfo->mipLevels),
+ arraySize(pCreateInfo->arrayLayers), format(pCreateInfo->format),
+ samples(pCreateInfo->samples), imageType(pCreateInfo->imageType),
+ extent(pCreateInfo->extent), flags(pCreateInfo->flags){};
} IMAGE_STATE;
#endif // IMAGE_H
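The reformatted IMAGE_STATE above is the per-VkImage record that the image layer fills at vkCreateImage time and later reads back through imageMap.find() in the copy/blit/resolve checks. The standalone sketch below models that record-then-lookup pattern; ToyImageState and the integer handle are stand-ins for IMAGE_STATE and VkImage, not the layer's real declarations.

// Minimal, self-contained model of the per-image state map (illustrative only).
#include <cstdint>
#include <iostream>
#include <unordered_map>

typedef std::uint64_t ToyImageHandle; // stand-in for VkImage

struct ToyImageState {                // trimmed stand-in for IMAGE_STATE
  std::uint32_t mipLevels;
  std::uint32_t arraySize;
  int format;                         // stand-in for VkFormat
};

int main() {
  std::unordered_map<ToyImageHandle, ToyImageState> imageMap;

  // Recorded when each image is created (cf. the _IMAGE_STATE(pCreateInfo) ctor).
  imageMap[0x1] = ToyImageState{4, 1, 37};
  imageMap[0x2] = ToyImageState{4, 1, 44};

  // Same lookup shape as srcImageEntry/dstImageEntry in vkCmdCopyImage above:
  auto src = imageMap.find(0x1);
  auto dst = imageMap.find(0x2);
  if (src != imageMap.end() && dst != imageMap.end() &&
      src->second.format != dst->second.format) {
    std::cout << "unmatched source and dest formats\n"; // would be a log_msg()
  }
  return 0;
}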
diff --git a/layers/mem_tracker.cpp b/layers/mem_tracker.cpp
index 3ba8426..c840d5d 100644
--- a/layers/mem_tracker.cpp
+++ b/layers/mem_tracker.cpp
@@ -54,134 +54,114 @@
#include "vk_layer_logging.h"
static LOADER_PLATFORM_THREAD_ONCE_DECLARATION(g_initOnce);
-// WSI Image Objects bypass usual Image Object creation methods. A special Memory
+// WSI Image Objects bypass usual Image Object creation methods. A special
+// Memory
// Object value will be used to identify them internally.
-static const VkDeviceMemory MEMTRACKER_SWAP_CHAIN_IMAGE_KEY = (VkDeviceMemory)(-1);
+static const VkDeviceMemory MEMTRACKER_SWAP_CHAIN_IMAGE_KEY =
+ (VkDeviceMemory)(-1);
struct layer_data {
- debug_report_data *report_data;
- std::vector<VkDebugReportCallbackEXT> logging_callback;
- VkLayerDispatchTable *device_dispatch_table;
- VkLayerInstanceDispatchTable *instance_dispatch_table;
- VkBool32 wsi_enabled;
- uint64_t currentFenceId;
- VkPhysicalDeviceProperties properties;
- unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>> bufferRanges, imageRanges;
+ debug_report_data *report_data;
+ std::vector<VkDebugReportCallbackEXT> logging_callback;
+ VkLayerDispatchTable *device_dispatch_table;
+ VkLayerInstanceDispatchTable *instance_dispatch_table;
+ VkBool32 wsi_enabled;
+ uint64_t currentFenceId;
+ VkPhysicalDeviceProperties properties;
+ unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>> bufferRanges,
+ imageRanges;
// Maps for tracking key structs related to MemTracker state
- unordered_map<VkCommandBuffer, MT_CB_INFO> cbMap;
- unordered_map<VkCommandPool, MT_CMD_POOL_INFO> commandPoolMap;
- unordered_map<VkDeviceMemory, MT_MEM_OBJ_INFO> memObjMap;
- unordered_map<VkFence, MT_FENCE_INFO> fenceMap;
- unordered_map<VkQueue, MT_QUEUE_INFO> queueMap;
- unordered_map<VkSwapchainKHR, MT_SWAP_CHAIN_INFO*> swapchainMap;
- unordered_map<VkSemaphore, MtSemaphoreState> semaphoreMap;
- unordered_map<VkFramebuffer, MT_FB_INFO> fbMap;
- unordered_map<VkRenderPass, MT_PASS_INFO> passMap;
- unordered_map<VkImageView, MT_IMAGE_VIEW_INFO> imageViewMap;
- // Images and Buffers are 2 objects that can have memory bound to them so they get special treatment
- unordered_map<uint64_t, MT_OBJ_BINDING_INFO> imageMap;
- unordered_map<uint64_t, MT_OBJ_BINDING_INFO> bufferMap;
+ unordered_map<VkCommandBuffer, MT_CB_INFO> cbMap;
+ unordered_map<VkCommandPool, MT_CMD_POOL_INFO> commandPoolMap;
+ unordered_map<VkDeviceMemory, MT_MEM_OBJ_INFO> memObjMap;
+ unordered_map<VkFence, MT_FENCE_INFO> fenceMap;
+ unordered_map<VkQueue, MT_QUEUE_INFO> queueMap;
+ unordered_map<VkSwapchainKHR, MT_SWAP_CHAIN_INFO *> swapchainMap;
+ unordered_map<VkSemaphore, MtSemaphoreState> semaphoreMap;
+ unordered_map<VkFramebuffer, MT_FB_INFO> fbMap;
+ unordered_map<VkRenderPass, MT_PASS_INFO> passMap;
+ unordered_map<VkImageView, MT_IMAGE_VIEW_INFO> imageViewMap;
+ // Images and Buffers are 2 objects that can have memory bound to them so
+ // they get special treatment
+ unordered_map<uint64_t, MT_OBJ_BINDING_INFO> imageMap;
+ unordered_map<uint64_t, MT_OBJ_BINDING_INFO> bufferMap;
- layer_data() :
- report_data(nullptr),
- device_dispatch_table(nullptr),
- instance_dispatch_table(nullptr),
- wsi_enabled(VK_FALSE),
- currentFenceId(1)
- {};
+ layer_data()
+ : report_data(nullptr), device_dispatch_table(nullptr),
+ instance_dispatch_table(nullptr), wsi_enabled(VK_FALSE),
+ currentFenceId(1){};
};
static unordered_map<void *, layer_data *> layer_data_map;
static VkPhysicalDeviceMemoryProperties memProps;
-static VkBool32 clear_cmd_buf_and_mem_references(layer_data* my_data, const VkCommandBuffer cb);
+static VkBool32 clear_cmd_buf_and_mem_references(layer_data *my_data,
+ const VkCommandBuffer cb);
-// TODO : This can be much smarter, using separate locks for separate global data
+// TODO : This can be much smarter, using separate locks for separate global
+// data
static int globalLockInitialized = 0;
static loader_platform_thread_mutex globalLock;
#define MAX_BINDING 0xFFFFFFFF
-static MT_OBJ_BINDING_INFO*
- get_object_binding_info(
- layer_data *my_data,
- uint64_t handle,
- VkDebugReportObjectTypeEXT type)
-{
- MT_OBJ_BINDING_INFO* retValue = NULL;
- switch (type)
- {
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- {
- auto it = my_data->imageMap.find(handle);
- if (it != my_data->imageMap.end())
- return &(*it).second;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- {
- auto it = my_data->bufferMap.find(handle);
- if (it != my_data->bufferMap.end())
- return &(*it).second;
- break;
- }
+static MT_OBJ_BINDING_INFO *
+get_object_binding_info(layer_data *my_data, uint64_t handle,
+ VkDebugReportObjectTypeEXT type) {
+ MT_OBJ_BINDING_INFO *retValue = NULL;
+ switch (type) {
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
+ auto it = my_data->imageMap.find(handle);
+ if (it != my_data->imageMap.end())
+ return &(*it).second;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
+ auto it = my_data->bufferMap.find(handle);
+ if (it != my_data->bufferMap.end())
+ return &(*it).second;
+ break;
+ }
}
return retValue;
}
-template layer_data *get_my_data_ptr<layer_data>(
- void *data_key,
- std::unordered_map<void *, layer_data *> &data_map);
+template layer_data *
+get_my_data_ptr<layer_data>(void *data_key,
+ std::unordered_map<void *, layer_data *> &data_map);
// Add new queue for this device to map container
-static void
-add_queue_info(
- layer_data *my_data,
- const VkQueue queue)
-{
- MT_QUEUE_INFO* pInfo = &my_data->queueMap[queue];
- pInfo->lastRetiredId = 0;
+static void add_queue_info(layer_data *my_data, const VkQueue queue) {
+ MT_QUEUE_INFO *pInfo = &my_data->queueMap[queue];
+ pInfo->lastRetiredId = 0;
pInfo->lastSubmittedId = 0;
}
-static void
-delete_queue_info_list(
- layer_data* my_data)
-{
+static void delete_queue_info_list(layer_data *my_data) {
// Process queue list, cleaning up each entry before deleting
my_data->queueMap.clear();
}
-static void
-add_swap_chain_info(
- layer_data *my_data,
- const VkSwapchainKHR swapchain,
- const VkSwapchainCreateInfoKHR *pCI)
-{
- MT_SWAP_CHAIN_INFO* pInfo = new MT_SWAP_CHAIN_INFO;
+static void add_swap_chain_info(layer_data *my_data,
+ const VkSwapchainKHR swapchain,
+ const VkSwapchainCreateInfoKHR *pCI) {
+ MT_SWAP_CHAIN_INFO *pInfo = new MT_SWAP_CHAIN_INFO;
memcpy(&pInfo->createInfo, pCI, sizeof(VkSwapchainCreateInfoKHR));
my_data->swapchainMap[swapchain] = pInfo;
}
// Add new CBInfo for this cb to map container
-static void
-add_cmd_buf_info(
- layer_data *my_data,
- VkCommandPool commandPool,
- const VkCommandBuffer cb)
-{
+static void add_cmd_buf_info(layer_data *my_data, VkCommandPool commandPool,
+ const VkCommandBuffer cb) {
my_data->cbMap[cb].commandBuffer = cb;
my_data->commandPoolMap[commandPool].pCommandBuffers.push_front(cb);
}
// Delete CBInfo from container and clear mem references to CB
-static VkBool32
-delete_cmd_buf_info(
- layer_data *my_data,
- VkCommandPool commandPool,
- const VkCommandBuffer cb)
-{
+static VkBool32 delete_cmd_buf_info(layer_data *my_data,
+ VkCommandPool commandPool,
+ const VkCommandBuffer cb) {
VkBool32 result = VK_TRUE;
result = clear_cmd_buf_and_mem_references(my_data, cb);
// Delete the CBInfo info
@@ -193,11 +173,8 @@
}
// Return ptr to Info in CB map, or NULL if not found
-static MT_CB_INFO*
-get_cmd_buf_info(
- layer_data *my_data,
- const VkCommandBuffer cb)
-{
+static MT_CB_INFO *get_cmd_buf_info(layer_data *my_data,
+ const VkCommandBuffer cb) {
auto item = my_data->cbMap.find(cb);
if (item != my_data->cbMap.end()) {
return &(*item).second;
@@ -206,95 +183,87 @@
}
}
-static void
-add_object_binding_info(
- layer_data *my_data,
- const uint64_t handle,
- const VkDebugReportObjectTypeEXT type,
- const VkDeviceMemory mem)
-{
- switch (type)
- {
- // Buffers and images are unique as their CreateInfo is in container struct
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- {
- auto pCI = &my_data->bufferMap[handle];
- pCI->mem = mem;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- {
- auto pCI = &my_data->imageMap[handle];
- pCI->mem = mem;
- break;
- }
+static void add_object_binding_info(layer_data *my_data, const uint64_t handle,
+ const VkDebugReportObjectTypeEXT type,
+ const VkDeviceMemory mem) {
+ switch (type) {
+ // Buffers and images are unique as their CreateInfo is in container struct
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
+ auto pCI = &my_data->bufferMap[handle];
+ pCI->mem = mem;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
+ auto pCI = &my_data->imageMap[handle];
+ pCI->mem = mem;
+ break;
+ }
}
}
-static void
-add_object_create_info(
- layer_data *my_data,
- const uint64_t handle,
- const VkDebugReportObjectTypeEXT type,
- const void *pCreateInfo)
-{
- // TODO : For any CreateInfo struct that has ptrs, need to deep copy them and appropriately clean up on Destroy
- switch (type)
- {
- // Buffers and images are unique as their CreateInfo is in container struct
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- {
- auto pCI = &my_data->bufferMap[handle];
- memset(pCI, 0, sizeof(MT_OBJ_BINDING_INFO));
- memcpy(&pCI->create_info.buffer, pCreateInfo, sizeof(VkBufferCreateInfo));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- {
- auto pCI = &my_data->imageMap[handle];
- memset(pCI, 0, sizeof(MT_OBJ_BINDING_INFO));
- memcpy(&pCI->create_info.image, pCreateInfo, sizeof(VkImageCreateInfo));
- break;
- }
- // Swap Chain is very unique, use my_data->imageMap, but copy in
- // SwapChainCreatInfo's usage flags and set the mem value to a unique key. These is used by
- // vkCreateImageView and internal MemTracker routines to distinguish swap chain images
- case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
- {
- auto pCI = &my_data->imageMap[handle];
- memset(pCI, 0, sizeof(MT_OBJ_BINDING_INFO));
- pCI->mem = MEMTRACKER_SWAP_CHAIN_IMAGE_KEY;
- pCI->valid = false;
- pCI->create_info.image.usage =
- const_cast<VkSwapchainCreateInfoKHR*>(static_cast<const VkSwapchainCreateInfoKHR *>(pCreateInfo))->imageUsage;
- break;
- }
+static void add_object_create_info(layer_data *my_data, const uint64_t handle,
+ const VkDebugReportObjectTypeEXT type,
+ const void *pCreateInfo) {
+ // TODO : For any CreateInfo struct that has ptrs, need to deep copy them
+ // and appropriately clean up on Destroy
+ switch (type) {
+ // Buffers and images are unique as their CreateInfo is in container struct
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
+ auto pCI = &my_data->bufferMap[handle];
+ memset(pCI, 0, sizeof(MT_OBJ_BINDING_INFO));
+ memcpy(&pCI->create_info.buffer, pCreateInfo,
+ sizeof(VkBufferCreateInfo));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
+ auto pCI = &my_data->imageMap[handle];
+ memset(pCI, 0, sizeof(MT_OBJ_BINDING_INFO));
+ memcpy(&pCI->create_info.image, pCreateInfo, sizeof(VkImageCreateInfo));
+ break;
+ }
+    // Swap Chain is very unique, use my_data->imageMap, but copy in
+    // SwapchainCreateInfo's usage flags and set the mem value to a unique key.
+    // This is used by
+    // vkCreateImageView and internal MemTracker routines to distinguish swap
+    // chain images
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT: {
+ auto pCI = &my_data->imageMap[handle];
+ memset(pCI, 0, sizeof(MT_OBJ_BINDING_INFO));
+ pCI->mem = MEMTRACKER_SWAP_CHAIN_IMAGE_KEY;
+ pCI->valid = false;
+ pCI->create_info.image.usage =
+ const_cast<VkSwapchainCreateInfoKHR *>(
+ static_cast<const VkSwapchainCreateInfoKHR *>(pCreateInfo))
+ ->imageUsage;
+ break;
+ }
}
}
// Add a fence, creating one if necessary to our list of fences/fenceIds
-static VkBool32
-add_fence_info(
- layer_data *my_data,
- VkFence fence,
- VkQueue queue,
- uint64_t *fenceId)
-{
+static VkBool32 add_fence_info(layer_data *my_data, VkFence fence,
+ VkQueue queue, uint64_t *fenceId) {
VkBool32 skipCall = VK_FALSE;
*fenceId = my_data->currentFenceId++;
// If no fence, create an internal fence to track the submissions
if (fence != VK_NULL_HANDLE) {
my_data->fenceMap[fence].fenceId = *fenceId;
- my_data->fenceMap[fence].queue = queue;
+ my_data->fenceMap[fence].queue = queue;
// Validate that fence is in UNSIGNALED state
- VkFenceCreateInfo* pFenceCI = &(my_data->fenceMap[fence].createInfo);
+ VkFenceCreateInfo *pFenceCI = &(my_data->fenceMap[fence].createInfo);
if (pFenceCI->flags & VK_FENCE_CREATE_SIGNALED_BIT) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t) fence, __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
- "Fence %#" PRIxLEAST64 " submitted in SIGNALED state. Fences must be reset before being submitted", (uint64_t) fence);
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t)fence,
+ __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
+ "Fence %#" PRIxLEAST64 " submitted in SIGNALED state. Fences "
+ "must be reset before being submitted",
+ (uint64_t)fence);
}
} else {
- // TODO : Do we need to create an internal fence here for tracking purposes?
+ // TODO : Do we need to create an internal fence here for tracking
+ // purposes?
}
// Update most recently submitted fence and fenceId for Queue
my_data->queueMap[queue].lastSubmittedId = *fenceId;
@@ -302,20 +271,12 @@
}
// Remove a fenceInfo from our list of fences/fenceIds
-static void
-delete_fence_info(
- layer_data *my_data,
- VkFence fence)
-{
+static void delete_fence_info(layer_data *my_data, VkFence fence) {
my_data->fenceMap.erase(fence);
}
// Record information when a fence is known to be signalled
-static void
-update_fence_tracking(
- layer_data *my_data,
- VkFence fence)
-{
+static void update_fence_tracking(layer_data *my_data, VkFence fence) {
auto fence_item = my_data->fenceMap.find(fence);
if (fence_item != my_data->fenceMap.end()) {
MT_FENCE_INFO *pCurFenceInfo = &(*fence_item).second;
@@ -331,30 +292,26 @@
// Update fence state in fenceCreateInfo structure
auto pFCI = &(my_data->fenceMap[fence].createInfo);
- pFCI->flags = static_cast<VkFenceCreateFlags>(pFCI->flags | VK_FENCE_CREATE_SIGNALED_BIT);
+ pFCI->flags = static_cast<VkFenceCreateFlags>(pFCI->flags |
+ VK_FENCE_CREATE_SIGNALED_BIT);
}
-// Helper routine that updates the fence list for a specific queue to all-retired
-static void
-retire_queue_fences(
- layer_data *my_data,
- VkQueue queue)
-{
+// Helper routine that updates the fence list for a specific queue to
+// all-retired
+static void retire_queue_fences(layer_data *my_data, VkQueue queue) {
MT_QUEUE_INFO *pQueueInfo = &my_data->queueMap[queue];
// Set queue's lastRetired to lastSubmitted indicating all fences completed
pQueueInfo->lastRetiredId = pQueueInfo->lastSubmittedId;
}
// Helper routine that updates all queues to all-retired
-static void
-retire_device_fences(
- layer_data *my_data,
- VkDevice device)
-{
+static void retire_device_fences(layer_data *my_data, VkDevice device) {
// Process each queue for device
// TODO: Add multiple device support
- for (auto ii=my_data->queueMap.begin(); ii!=my_data->queueMap.end(); ++ii) {
- // Set queue's lastRetired to lastSubmitted indicating all fences completed
+ for (auto ii = my_data->queueMap.begin(); ii != my_data->queueMap.end();
+ ++ii) {
+ // Set queue's lastRetired to lastSubmitted indicating all fences
+ // completed
MT_QUEUE_INFO *pQueueInfo = &(*ii).second;
pQueueInfo->lastRetiredId = pQueueInfo->lastSubmittedId;
}
@@ -364,84 +321,75 @@
// Verify that (actual & desired) flags != 0 or,
// if strict is true, verify that (actual & desired) flags == desired
// In case of error, report it via dbg callbacks
-static VkBool32
-validate_usage_flags(
- layer_data *my_data,
- void *disp_obj,
- VkFlags actual,
- VkFlags desired,
- VkBool32 strict,
- uint64_t obj_handle,
- VkDebugReportObjectTypeEXT obj_type,
- char const *ty_str,
- char const *func_name,
- char const *usage_str)
-{
+static VkBool32 validate_usage_flags(layer_data *my_data, void *disp_obj,
+ VkFlags actual, VkFlags desired,
+ VkBool32 strict, uint64_t obj_handle,
+ VkDebugReportObjectTypeEXT obj_type,
+ char const *ty_str, char const *func_name,
+ char const *usage_str) {
VkBool32 correct_usage = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
if (strict)
correct_usage = ((actual & desired) == desired);
else
correct_usage = ((actual & desired) != 0);
if (!correct_usage) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, obj_type, obj_handle, __LINE__, MEMTRACK_INVALID_USAGE_FLAG, "MEM",
- "Invalid usage flag for %s %#" PRIxLEAST64 " used by %s. In this case, %s should have %s set during creation.",
- ty_str, obj_handle, func_name, ty_str, usage_str);
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, obj_type,
+ obj_handle, __LINE__, MEMTRACK_INVALID_USAGE_FLAG, "MEM",
+ "Invalid usage flag for %s %#" PRIxLEAST64
+ " used by %s. In this case, %s should have %s set during creation.",
+ ty_str, obj_handle, func_name, ty_str, usage_str);
}
return skipCall;
}
// Helper function to validate usage flags for images
-// Pulls image info and then sends actual vs. desired usage off to helper above where
+// Pulls image info and then sends actual vs. desired usage off to helper above
+// where
// an error will be flagged if usage is not correct
-static VkBool32
-validate_image_usage_flags(
- layer_data *my_data,
- void *disp_obj,
- VkImage image,
- VkFlags desired,
- VkBool32 strict,
- char const *func_name,
- char const *usage_string)
-{
+static VkBool32 validate_image_usage_flags(layer_data *my_data, void *disp_obj,
+ VkImage image, VkFlags desired,
+ VkBool32 strict,
+ char const *func_name,
+ char const *usage_string) {
VkBool32 skipCall = VK_FALSE;
- MT_OBJ_BINDING_INFO* pBindInfo = get_object_binding_info(my_data, (uint64_t)image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
+ MT_OBJ_BINDING_INFO *pBindInfo = get_object_binding_info(
+ my_data, (uint64_t)image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
if (pBindInfo) {
- skipCall = validate_usage_flags(my_data, disp_obj, pBindInfo->create_info.image.usage, desired, strict,
- (uint64_t) image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, "image", func_name, usage_string);
+ skipCall = validate_usage_flags(
+ my_data, disp_obj, pBindInfo->create_info.image.usage, desired,
+ strict, (uint64_t)image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ "image", func_name, usage_string);
}
return skipCall;
}
// Helper function to validate usage flags for buffers
-// Pulls buffer info and then sends actual vs. desired usage off to helper above where
+// Pulls buffer info and then sends actual vs. desired usage off to helper above
+// where
// an error will be flagged if usage is not correct
-static VkBool32
-validate_buffer_usage_flags(
- layer_data *my_data,
- void *disp_obj,
- VkBuffer buffer,
- VkFlags desired,
- VkBool32 strict,
- char const *func_name,
- char const *usage_string)
-{
+static VkBool32 validate_buffer_usage_flags(layer_data *my_data, void *disp_obj,
+ VkBuffer buffer, VkFlags desired,
+ VkBool32 strict,
+ char const *func_name,
+ char const *usage_string) {
VkBool32 skipCall = VK_FALSE;
- MT_OBJ_BINDING_INFO* pBindInfo = get_object_binding_info(my_data, (uint64_t) buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
+ MT_OBJ_BINDING_INFO *pBindInfo = get_object_binding_info(
+ my_data, (uint64_t)buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
if (pBindInfo) {
- skipCall = validate_usage_flags(my_data, disp_obj, pBindInfo->create_info.buffer.usage, desired, strict,
- (uint64_t) buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, "buffer", func_name, usage_string);
+ skipCall = validate_usage_flags(
+ my_data, disp_obj, pBindInfo->create_info.buffer.usage, desired,
+ strict, (uint64_t)buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ "buffer", func_name, usage_string);
}
return skipCall;
}
// Return ptr to info in map container containing mem, or NULL if not found
// Calls to this function should be wrapped in mutex
-static MT_MEM_OBJ_INFO*
-get_mem_obj_info(
- layer_data *my_data,
- const VkDeviceMemory mem)
-{
+static MT_MEM_OBJ_INFO *get_mem_obj_info(layer_data *my_data,
+ const VkDeviceMemory mem) {
auto item = my_data->memObjMap.find(mem);
if (item != my_data->memObjMap.end()) {
return &(*item).second;
@@ -450,51 +398,62 @@
}
}
-static void
-add_mem_obj_info(
- layer_data *my_data,
- void *object,
- const VkDeviceMemory mem,
- const VkMemoryAllocateInfo *pAllocateInfo)
-{
+static void add_mem_obj_info(layer_data *my_data, void *object,
+ const VkDeviceMemory mem,
+ const VkMemoryAllocateInfo *pAllocateInfo) {
assert(object != NULL);
- memcpy(&my_data->memObjMap[mem].allocInfo, pAllocateInfo, sizeof(VkMemoryAllocateInfo));
- // TODO: Update for real hardware, actually process allocation info structures
+ memcpy(&my_data->memObjMap[mem].allocInfo, pAllocateInfo,
+ sizeof(VkMemoryAllocateInfo));
+ // TODO: Update for real hardware, actually process allocation info
+ // structures
my_data->memObjMap[mem].allocInfo.pNext = NULL;
- my_data->memObjMap[mem].object = object;
- my_data->memObjMap[mem].refCount = 0;
- my_data->memObjMap[mem].mem = mem;
+ my_data->memObjMap[mem].object = object;
+ my_data->memObjMap[mem].refCount = 0;
+ my_data->memObjMap[mem].mem = mem;
my_data->memObjMap[mem].memRange.offset = 0;
- my_data->memObjMap[mem].memRange.size = 0;
- my_data->memObjMap[mem].pData = 0;
- my_data->memObjMap[mem].pDriverData = 0;
- my_data->memObjMap[mem].valid = false;
+ my_data->memObjMap[mem].memRange.size = 0;
+ my_data->memObjMap[mem].pData = 0;
+ my_data->memObjMap[mem].pDriverData = 0;
+ my_data->memObjMap[mem].valid = false;
}
-static VkBool32 validate_memory_is_valid(layer_data *my_data, VkDeviceMemory mem, const char* functionName, VkImage image = VK_NULL_HANDLE) {
+static VkBool32 validate_memory_is_valid(layer_data *my_data,
+ VkDeviceMemory mem,
+ const char *functionName,
+ VkImage image = VK_NULL_HANDLE) {
if (mem == MEMTRACKER_SWAP_CHAIN_IMAGE_KEY) {
- MT_OBJ_BINDING_INFO* pBindInfo = get_object_binding_info(my_data, (uint64_t)(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
+ MT_OBJ_BINDING_INFO *pBindInfo = get_object_binding_info(
+ my_data, (uint64_t)(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
if (pBindInfo && !pBindInfo->valid) {
- return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- (uint64_t)(mem), __LINE__, MEMTRACK_INVALID_USAGE_FLAG, "MEM",
- "%s: Cannot read invalid swapchain image %" PRIx64 ", please fill the memory before using.", functionName, (uint64_t)(image));
+ return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)(mem), __LINE__,
+ MEMTRACK_INVALID_USAGE_FLAG, "MEM",
+ "%s: Cannot read invalid swapchain image %" PRIx64
+ ", please fill the memory before using.",
+ functionName, (uint64_t)(image));
}
- }
- else {
+ } else {
MT_MEM_OBJ_INFO *pMemObj = get_mem_obj_info(my_data, mem);
if (pMemObj && !pMemObj->valid) {
- return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- (uint64_t)(mem), __LINE__, MEMTRACK_INVALID_USAGE_FLAG, "MEM",
- "%s: Cannot read invalid memory %" PRIx64 ", please fill the memory before using.", functionName, (uint64_t)(mem));
+ return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)(mem), __LINE__,
+ MEMTRACK_INVALID_USAGE_FLAG, "MEM",
+ "%s: Cannot read invalid memory %" PRIx64
+ ", please fill the memory before using.",
+ functionName, (uint64_t)(mem));
}
}
return false;
}
-static void set_memory_valid(layer_data *my_data, VkDeviceMemory mem, bool valid, VkImage image = VK_NULL_HANDLE) {
+static void set_memory_valid(layer_data *my_data, VkDeviceMemory mem,
+ bool valid, VkImage image = VK_NULL_HANDLE) {
if (mem == MEMTRACKER_SWAP_CHAIN_IMAGE_KEY) {
- MT_OBJ_BINDING_INFO* pBindInfo = get_object_binding_info(my_data, (uint64_t)(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
+ MT_OBJ_BINDING_INFO *pBindInfo = get_object_binding_info(
+ my_data, (uint64_t)(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
if (pBindInfo) {
pBindInfo->valid = valid;
}
@@ -508,25 +467,24 @@
// Find CB Info and add mem reference to list container
// Find Mem Obj Info and add CB reference to list container
-static VkBool32
-update_cmd_buf_and_mem_references(
- layer_data *my_data,
- const VkCommandBuffer cb,
- const VkDeviceMemory mem,
- const char *apiName)
-{
+static VkBool32 update_cmd_buf_and_mem_references(layer_data *my_data,
+ const VkCommandBuffer cb,
+ const VkDeviceMemory mem,
+ const char *apiName) {
VkBool32 skipCall = VK_FALSE;
// Skip validation if this image was created through WSI
if (mem != MEMTRACKER_SWAP_CHAIN_IMAGE_KEY) {
// First update CB binding in MemObj mini CB list
- MT_MEM_OBJ_INFO* pMemInfo = get_mem_obj_info(my_data, mem);
+ MT_MEM_OBJ_INFO *pMemInfo = get_mem_obj_info(my_data, mem);
if (pMemInfo) {
// Search for cmd buffer object in memory object's binding list
- VkBool32 found = VK_FALSE;
+ VkBool32 found = VK_FALSE;
if (pMemInfo->pCommandBufferBindings.size() > 0) {
- for (list<VkCommandBuffer>::iterator it = pMemInfo->pCommandBufferBindings.begin(); it != pMemInfo->pCommandBufferBindings.end(); ++it) {
+ for (list<VkCommandBuffer>::iterator it =
+ pMemInfo->pCommandBufferBindings.begin();
+ it != pMemInfo->pCommandBufferBindings.end(); ++it) {
if ((*it) == cb) {
found = VK_TRUE;
break;
@@ -539,13 +497,15 @@
pMemInfo->refCount++;
}
// Now update CBInfo's Mem reference list
- MT_CB_INFO* pCBInfo = get_cmd_buf_info(my_data, cb);
- // TODO: keep track of all destroyed CBs so we know if this is a stale or simply invalid object
+ MT_CB_INFO *pCBInfo = get_cmd_buf_info(my_data, cb);
+ // TODO: keep track of all destroyed CBs so we know if this is a
+ // stale or simply invalid object
if (pCBInfo) {
// Search for memory object in cmd buffer's reference list
- VkBool32 found = VK_FALSE;
+ VkBool32 found = VK_FALSE;
if (pCBInfo->pMemObjList.size() > 0) {
- for (auto it = pCBInfo->pMemObjList.begin(); it != pCBInfo->pMemObjList.end(); ++it) {
+ for (auto it = pCBInfo->pMemObjList.begin();
+ it != pCBInfo->pMemObjList.end(); ++it) {
if ((*it) == mem) {
found = VK_TRUE;
break;
@@ -563,18 +523,16 @@
}
// Free bindings related to CB
-static VkBool32
-clear_cmd_buf_and_mem_references(
- layer_data *my_data,
- const VkCommandBuffer cb)
-{
+static VkBool32 clear_cmd_buf_and_mem_references(layer_data *my_data,
+ const VkCommandBuffer cb) {
VkBool32 skipCall = VK_FALSE;
- MT_CB_INFO* pCBInfo = get_cmd_buf_info(my_data, cb);
+ MT_CB_INFO *pCBInfo = get_cmd_buf_info(my_data, cb);
if (pCBInfo && (pCBInfo->pMemObjList.size() > 0)) {
list<VkDeviceMemory> mem_obj_list = pCBInfo->pMemObjList;
- for (list<VkDeviceMemory>::iterator it=mem_obj_list.begin(); it!=mem_obj_list.end(); ++it) {
- MT_MEM_OBJ_INFO* pInfo = get_mem_obj_info(my_data, *it);
+ for (list<VkDeviceMemory>::iterator it = mem_obj_list.begin();
+ it != mem_obj_list.end(); ++it) {
+ MT_MEM_OBJ_INFO *pInfo = get_mem_obj_info(my_data, *it);
if (pInfo) {
pInfo->pCommandBufferBindings.remove(cb);
pInfo->refCount--;
@@ -587,12 +545,11 @@
}
// Delete the entire CB list
-static VkBool32
-delete_cmd_buf_info_list(
- layer_data* my_data)
-{
+static VkBool32 delete_cmd_buf_info_list(layer_data *my_data) {
VkBool32 skipCall = VK_FALSE;
- for (unordered_map<VkCommandBuffer, MT_CB_INFO>::iterator ii=my_data->cbMap.begin(); ii!=my_data->cbMap.end(); ++ii) {
+ for (unordered_map<VkCommandBuffer, MT_CB_INFO>::iterator ii =
+ my_data->cbMap.begin();
+ ii != my_data->cbMap.end(); ++ii) {
skipCall |= clear_cmd_buf_and_mem_references(my_data, (*ii).first);
}
my_data->cbMap.clear();
@@ -600,35 +557,46 @@
}
// For given MemObjInfo, report Obj & CB bindings
-static VkBool32
-reportMemReferencesAndCleanUp(
- layer_data *my_data,
- MT_MEM_OBJ_INFO *pMemObjInfo)
-{
+static VkBool32 reportMemReferencesAndCleanUp(layer_data *my_data,
+ MT_MEM_OBJ_INFO *pMemObjInfo) {
VkBool32 skipCall = VK_FALSE;
size_t cmdBufRefCount = pMemObjInfo->pCommandBufferBindings.size();
- size_t objRefCount = pMemObjInfo->pObjBindings.size();
+ size_t objRefCount = pMemObjInfo->pObjBindings.size();
if ((pMemObjInfo->pCommandBufferBindings.size()) != 0) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t) pMemObjInfo->mem, __LINE__, MEMTRACK_FREED_MEM_REF, "MEM",
- "Attempting to free memory object %#" PRIxLEAST64 " which still contains " PRINTF_SIZE_T_SPECIFIER " references",
- (uint64_t) pMemObjInfo->mem, (cmdBufRefCount + objRefCount));
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)pMemObjInfo->mem, __LINE__, MEMTRACK_FREED_MEM_REF, "MEM",
+ "Attempting to free memory object %#" PRIxLEAST64
+ " which still contains " PRINTF_SIZE_T_SPECIFIER " references",
+ (uint64_t)pMemObjInfo->mem, (cmdBufRefCount + objRefCount));
}
if (cmdBufRefCount > 0 && pMemObjInfo->pCommandBufferBindings.size() > 0) {
- for (list<VkCommandBuffer>::const_iterator it = pMemObjInfo->pCommandBufferBindings.begin(); it != pMemObjInfo->pCommandBufferBindings.end(); ++it) {
+ for (list<VkCommandBuffer>::const_iterator it =
+ pMemObjInfo->pCommandBufferBindings.begin();
+ it != pMemObjInfo->pCommandBufferBindings.end(); ++it) {
// TODO : CommandBuffer should be source Obj here
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(*it), __LINE__, MEMTRACK_FREED_MEM_REF, "MEM",
- "Command Buffer %p still has a reference to mem obj %#" PRIxLEAST64, (*it), (uint64_t) pMemObjInfo->mem);
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)(*it), __LINE__, MEMTRACK_FREED_MEM_REF, "MEM",
+ "Command Buffer %p still has a reference to mem obj "
+ "%#" PRIxLEAST64,
+ (*it), (uint64_t)pMemObjInfo->mem);
}
// Clear the list of hanging references
pMemObjInfo->pCommandBufferBindings.clear();
}
if (objRefCount > 0 && pMemObjInfo->pObjBindings.size() > 0) {
- for (auto it = pMemObjInfo->pObjBindings.begin(); it != pMemObjInfo->pObjBindings.end(); ++it) {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, it->type, it->handle, __LINE__, MEMTRACK_FREED_MEM_REF, "MEM",
- "VK Object %#" PRIxLEAST64 " still has a reference to mem obj %#" PRIxLEAST64, it->handle, (uint64_t) pMemObjInfo->mem);
+ for (auto it = pMemObjInfo->pObjBindings.begin();
+ it != pMemObjInfo->pObjBindings.end(); ++it) {
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ it->type, it->handle, __LINE__, MEMTRACK_FREED_MEM_REF,
+ "MEM", "VK Object %#" PRIxLEAST64
+ " still has a reference to mem obj %#" PRIxLEAST64,
+ it->handle, (uint64_t)pMemObjInfo->mem);
}
// Clear the list of hanging references
pMemObjInfo->pObjBindings.clear();
@@ -636,42 +604,43 @@
return skipCall;
}
-static VkBool32
-deleteMemObjInfo(
- layer_data *my_data,
- void *object,
- VkDeviceMemory mem)
-{
+static VkBool32 deleteMemObjInfo(layer_data *my_data, void *object,
+ VkDeviceMemory mem) {
VkBool32 skipCall = VK_FALSE;
auto item = my_data->memObjMap.find(mem);
if (item != my_data->memObjMap.end()) {
my_data->memObjMap.erase(item);
} else {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t) mem, __LINE__, MEMTRACK_INVALID_MEM_OBJ, "MEM",
- "Request to delete memory object %#" PRIxLEAST64 " not present in memory Object Map", (uint64_t) mem);
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MEM_OBJ, "MEM",
+ "Request to delete memory object %#" PRIxLEAST64
+ " not present in memory Object Map",
+ (uint64_t)mem);
}
return skipCall;
}
// Check if fence for given CB is completed
-static VkBool32
-checkCBCompleted(
- layer_data *my_data,
- const VkCommandBuffer cb,
- VkBool32 *complete)
-{
- MT_CB_INFO *pCBInfo = get_cmd_buf_info(my_data, cb);
- VkBool32 skipCall = VK_FALSE;
- *complete = VK_TRUE;
+static VkBool32 checkCBCompleted(layer_data *my_data, const VkCommandBuffer cb,
+ VkBool32 *complete) {
+ MT_CB_INFO *pCBInfo = get_cmd_buf_info(my_data, cb);
+ VkBool32 skipCall = VK_FALSE;
+ *complete = VK_TRUE;
if (pCBInfo) {
if (pCBInfo->lastSubmittedQueue != NULL) {
VkQueue queue = pCBInfo->lastSubmittedQueue;
MT_QUEUE_INFO *pQueueInfo = &my_data->queueMap[queue];
if (pCBInfo->fenceId > pQueueInfo->lastRetiredId) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)cb, __LINE__,
- MEMTRACK_NONE, "MEM", "fence %#" PRIxLEAST64 " for CB %p has not been checked for completion",
- (uint64_t) pCBInfo->lastSubmittedFence, cb);
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)cb, __LINE__, MEMTRACK_NONE, "MEM",
+ "fence %#" PRIxLEAST64
+ " for CB %p has not been checked for completion",
+ (uint64_t)pCBInfo->lastSubmittedFence, cb);
*complete = VK_FALSE;
}
}
@@ -679,32 +648,35 @@
return skipCall;
}
-static VkBool32
-freeMemObjInfo(
- layer_data *my_data,
- void* object,
- VkDeviceMemory mem,
- VkBool32 internal)
-{
+static VkBool32 freeMemObjInfo(layer_data *my_data, void *object,
+ VkDeviceMemory mem, VkBool32 internal) {
VkBool32 skipCall = VK_FALSE;
// Parse global list to find info w/ mem
- MT_MEM_OBJ_INFO* pInfo = get_mem_obj_info(my_data, mem);
+ MT_MEM_OBJ_INFO *pInfo = get_mem_obj_info(my_data, mem);
if (pInfo) {
if (pInfo->allocInfo.allocationSize == 0 && !internal) {
// TODO: Verify against Valid Use section
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t) mem, __LINE__, MEMTRACK_INVALID_MEM_OBJ, "MEM",
- "Attempting to free memory associated with a Persistent Image, %#" PRIxLEAST64 ", "
- "this should not be explicitly freed\n", (uint64_t) mem);
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MEM_OBJ,
+ "MEM", "Attempting to free memory associated with a "
+ "Persistent Image, %#" PRIxLEAST64 ", "
+ "this should not be explicitly freed\n",
+ (uint64_t)mem);
} else {
// Clear any CB bindings for completed CBs
// TODO : Is there a better place to do this?
VkBool32 commandBufferComplete = VK_FALSE;
assert(pInfo->object != VK_NULL_HANDLE);
- list<VkCommandBuffer>::iterator it = pInfo->pCommandBufferBindings.begin();
+ list<VkCommandBuffer>::iterator it =
+ pInfo->pCommandBufferBindings.begin();
list<VkCommandBuffer>::iterator temp;
- while (pInfo->pCommandBufferBindings.size() > 0 && it != pInfo->pCommandBufferBindings.end()) {
- skipCall |= checkCBCompleted(my_data, *it, &commandBufferComplete);
+ while (pInfo->pCommandBufferBindings.size() > 0 &&
+ it != pInfo->pCommandBufferBindings.end()) {
+ skipCall |=
+ checkCBCompleted(my_data, *it, &commandBufferComplete);
if (VK_TRUE == commandBufferComplete) {
temp = it;
++temp;
@@ -715,7 +687,8 @@
}
}
- // Now verify that no references to this mem obj remain and remove bindings
+ // Now verify that no references to this mem obj remain and remove
+ // bindings
if (0 != pInfo->refCount) {
skipCall |= reportMemReferencesAndCleanUp(my_data, pInfo);
}
@@ -726,23 +699,19 @@
return skipCall;
}
-static const char*
-object_type_to_string(
- VkDebugReportObjectTypeEXT type)
-{
- switch (type)
- {
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- return "image";
- break;
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- return "buffer";
- break;
- case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
- return "swapchain";
- break;
- default:
- return "unknown";
+static const char *object_type_to_string(VkDebugReportObjectTypeEXT type) {
+ switch (type) {
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
+ return "image";
+ break;
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
+ return "buffer";
+ break;
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
+ return "swapchain";
+ break;
+ default:
+ return "unknown";
}
}
@@ -750,26 +719,29 @@
// 1. Remove ObjectInfo from MemObjInfo list container of obj bindings & free it
// 2. Decrement refCount for MemObjInfo
// 3. Clear mem binding for image/buffer by setting its handle to 0
-// TODO : This only applied to Buffer, Image, and Swapchain objects now, how should it be updated/customized?
-static VkBool32
-clear_object_binding(
- layer_data *my_data,
- void *dispObj,
- uint64_t handle,
- VkDebugReportObjectTypeEXT type)
-{
- // TODO : Need to customize images/buffers/swapchains to track mem binding and clear it here appropriately
+// TODO : This only applies to Buffer, Image, and Swapchain objects now, how
+// should it be updated/customized?
+static VkBool32 clear_object_binding(layer_data *my_data, void *dispObj,
+ uint64_t handle,
+ VkDebugReportObjectTypeEXT type) {
+ // TODO : Need to customize images/buffers/swapchains to track mem binding
+ // and clear it here appropriately
VkBool32 skipCall = VK_FALSE;
- MT_OBJ_BINDING_INFO* pObjBindInfo = get_object_binding_info(my_data, handle, type);
+ MT_OBJ_BINDING_INFO *pObjBindInfo =
+ get_object_binding_info(my_data, handle, type);
if (pObjBindInfo) {
- MT_MEM_OBJ_INFO* pMemObjInfo = get_mem_obj_info(my_data, pObjBindInfo->mem);
+ MT_MEM_OBJ_INFO *pMemObjInfo =
+ get_mem_obj_info(my_data, pObjBindInfo->mem);
// TODO : Make sure this is a reasonable way to reset mem binding
pObjBindInfo->mem = VK_NULL_HANDLE;
if (pMemObjInfo) {
- // This obj is bound to a memory object. Remove the reference to this object in that memory object's list, decrement the memObj's refcount
+ // This obj is bound to a memory object. Remove the reference to
+ // this object in that memory object's list, decrement the memObj's
+ // refcount
// and set the objects memory binding pointer to NULL.
VkBool32 clearSucceeded = VK_FALSE;
- for (auto it = pMemObjInfo->pObjBindings.begin(); it != pMemObjInfo->pObjBindings.end(); ++it) {
+ for (auto it = pMemObjInfo->pObjBindings.begin();
+ it != pMemObjInfo->pObjBindings.end(); ++it) {
if ((it->handle == handle) && (it->type == type)) {
pMemObjInfo->refCount--;
pMemObjInfo->pObjBindings.erase(it);
@@ -777,10 +749,15 @@
break;
}
}
- if (VK_FALSE == clearSucceeded ) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type, handle, __LINE__, MEMTRACK_INVALID_OBJECT, "MEM",
- "While trying to clear mem binding for %s obj %#" PRIxLEAST64 ", unable to find that object referenced by mem obj %#" PRIxLEAST64,
- object_type_to_string(type), handle, (uint64_t) pMemObjInfo->mem);
+ if (VK_FALSE == clearSucceeded) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type,
+ handle, __LINE__, MEMTRACK_INVALID_OBJECT, "MEM",
+ "While trying to clear mem binding for %s obj "
+ "%#" PRIxLEAST64 ", unable to find that object referenced "
+ "by mem obj %#" PRIxLEAST64,
+ object_type_to_string(type), handle,
+ (uint64_t)pMemObjInfo->mem);
}
}
}
@@ -794,50 +771,63 @@
// Add reference off of objInfo
// device is required for error logging, need a dispatchable
// object for that.
-static VkBool32
-set_mem_binding(
- layer_data *my_data,
- void *dispatch_object,
- VkDeviceMemory mem,
- uint64_t handle,
- VkDebugReportObjectTypeEXT type,
- const char *apiName)
-{
+static VkBool32 set_mem_binding(layer_data *my_data, void *dispatch_object,
+ VkDeviceMemory mem, uint64_t handle,
+ VkDebugReportObjectTypeEXT type,
+ const char *apiName) {
VkBool32 skipCall = VK_FALSE;
- // Handle NULL case separately, just clear previous binding & decrement reference
+ // Handle NULL case separately, just clear previous binding & decrement
+ // reference
if (mem == VK_NULL_HANDLE) {
// TODO: Verify against Valid Use section of spec.
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, type, handle, __LINE__, MEMTRACK_INVALID_MEM_OBJ, "MEM",
- "In %s, attempting to Bind Obj(%#" PRIxLEAST64 ") to NULL", apiName, handle);
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, type,
+ handle, __LINE__, MEMTRACK_INVALID_MEM_OBJ, "MEM",
+ "In %s, attempting to Bind Obj(%#" PRIxLEAST64 ") to NULL",
+ apiName, handle);
} else {
- MT_OBJ_BINDING_INFO* pObjBindInfo = get_object_binding_info(my_data, handle, type);
+ MT_OBJ_BINDING_INFO *pObjBindInfo =
+ get_object_binding_info(my_data, handle, type);
if (!pObjBindInfo) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type, handle, __LINE__, MEMTRACK_MISSING_MEM_BINDINGS, "MEM",
- "In %s, attempting to update Binding of %s Obj(%#" PRIxLEAST64 ") that's not in global list()",
- object_type_to_string(type), apiName, handle);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type,
+ handle, __LINE__, MEMTRACK_MISSING_MEM_BINDINGS, "MEM",
+ "In %s, attempting to update Binding of %s Obj(%#" PRIxLEAST64
+ ") that's not in global list()",
+ object_type_to_string(type), apiName, handle);
} else {
// non-null case so should have real mem obj
- MT_MEM_OBJ_INFO* pMemInfo = get_mem_obj_info(my_data, mem);
+ MT_MEM_OBJ_INFO *pMemInfo = get_mem_obj_info(my_data, mem);
if (pMemInfo) {
- // TODO : Need to track mem binding for obj and report conflict here
- MT_MEM_OBJ_INFO* pPrevBinding = get_mem_obj_info(my_data, pObjBindInfo->mem);
+ // TODO : Need to track mem binding for obj and report conflict
+ // here
+ MT_MEM_OBJ_INFO *pPrevBinding =
+ get_mem_obj_info(my_data, pObjBindInfo->mem);
if (pPrevBinding != NULL) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t) mem, __LINE__, MEMTRACK_REBIND_OBJECT, "MEM",
- "In %s, attempting to bind memory (%#" PRIxLEAST64 ") to object (%#" PRIxLEAST64 ") which has already been bound to mem object %#" PRIxLEAST64,
- apiName, (uint64_t) mem, handle, (uint64_t) pPrevBinding->mem);
- }
- else {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_REBIND_OBJECT, "MEM",
+ "In %s, attempting to bind memory (%#" PRIxLEAST64
+ ") to object (%#" PRIxLEAST64 ") which has already "
+ "been bound to mem "
+ "object %#" PRIxLEAST64,
+ apiName, (uint64_t)mem, handle,
+ (uint64_t)pPrevBinding->mem);
+ } else {
MT_OBJ_HANDLE_TYPE oht;
oht.handle = handle;
oht.type = type;
pMemInfo->pObjBindings.push_front(oht);
pMemInfo->refCount++;
- // For image objects, make sure default memory state is correctly set
+ // For image objects, make sure default memory state is
+ // correctly set
// TODO : What's the best/correct way to handle this?
if (VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT == type) {
VkImageCreateInfo ici = pObjBindInfo->create_info.image;
- if (ici.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
- VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
+ if (ici.usage &
+ (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)) {
// TODO:: More memory state transition stuff.
}
}
@@ -855,32 +845,34 @@
// Add reference from objectInfo to memoryInfo
// Add reference off of object's binding info
// Return VK_TRUE if addition is successful, VK_FALSE otherwise
-static VkBool32
-set_sparse_mem_binding(
- layer_data *my_data,
- void *dispObject,
- VkDeviceMemory mem,
- uint64_t handle,
- VkDebugReportObjectTypeEXT type,
- const char *apiName)
-{
+static VkBool32 set_sparse_mem_binding(layer_data *my_data, void *dispObject,
+ VkDeviceMemory mem, uint64_t handle,
+ VkDebugReportObjectTypeEXT type,
+ const char *apiName) {
VkBool32 skipCall = VK_FALSE;
- // Handle NULL case separately, just clear previous binding & decrement reference
+ // Handle NULL case separately, just clear previous binding & decrement
+ // reference
if (mem == VK_NULL_HANDLE) {
skipCall = clear_object_binding(my_data, dispObject, handle, type);
} else {
- MT_OBJ_BINDING_INFO* pObjBindInfo = get_object_binding_info(my_data, handle, type);
+ MT_OBJ_BINDING_INFO *pObjBindInfo =
+ get_object_binding_info(my_data, handle, type);
if (!pObjBindInfo) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type, handle, __LINE__, MEMTRACK_MISSING_MEM_BINDINGS, "MEM",
- "In %s, attempting to update Binding of Obj(%#" PRIxLEAST64 ") that's not in global list()", apiName, handle);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type,
+ handle, __LINE__, MEMTRACK_MISSING_MEM_BINDINGS, "MEM",
+ "In %s, attempting to update Binding of Obj(%#" PRIxLEAST64
+ ") that's not in global list()",
+ apiName, handle);
}
// non-null case so should have real mem obj
- MT_MEM_OBJ_INFO* pInfo = get_mem_obj_info(my_data, mem);
+ MT_MEM_OBJ_INFO *pInfo = get_mem_obj_info(my_data, mem);
if (pInfo) {
// Search for object in memory object's binding list
- VkBool32 found = VK_FALSE;
+ VkBool32 found = VK_FALSE;
if (pInfo->pObjBindings.size() > 0) {
- for (auto it = pInfo->pObjBindings.begin(); it != pInfo->pObjBindings.end(); ++it) {
+ for (auto it = pInfo->pObjBindings.begin();
+ it != pInfo->pObjBindings.end(); ++it) {
if (((*it).handle == handle) && ((*it).type == type)) {
found = VK_TRUE;
break;
@@ -891,162 +883,196 @@
if (found == VK_FALSE) {
MT_OBJ_HANDLE_TYPE oht;
oht.handle = handle;
- oht.type = type;
+ oht.type = type;
pInfo->pObjBindings.push_front(oht);
pInfo->refCount++;
}
// Need to set mem binding for this object
- MT_MEM_OBJ_INFO* pPrevBinding = get_mem_obj_info(my_data, pObjBindInfo->mem);
+ MT_MEM_OBJ_INFO *pPrevBinding =
+ get_mem_obj_info(my_data, pObjBindInfo->mem);
pObjBindInfo->mem = mem;
}
}
return skipCall;
}
-template <typename T> void
-print_object_map_members(
- layer_data *my_data,
- void *dispObj,
- T const& objectName,
- VkDebugReportObjectTypeEXT objectType,
- const char *objectStr)
-{
- for (auto const& element : objectName) {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, objectType, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " %s Object list contains %s Object %#" PRIxLEAST64 " ", objectStr, objectStr, element.first);
+template <typename T>
+void print_object_map_members(layer_data *my_data, void *dispObj,
+ T const &objectName,
+ VkDebugReportObjectTypeEXT objectType,
+ const char *objectStr) {
+ for (auto const &element : objectName) {
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, objectType,
+ 0, __LINE__, MEMTRACK_NONE, "MEM",
+ " %s Object list contains %s Object %#" PRIxLEAST64 " ",
+ objectStr, objectStr, element.first);
}
}
// For given Object, get 'mem' obj that it's bound to or NULL if no binding
-static VkBool32
-get_mem_binding_from_object(
- layer_data *my_data,
- void *dispObj,
- const uint64_t handle,
- const VkDebugReportObjectTypeEXT type,
- VkDeviceMemory *mem)
-{
+static VkBool32 get_mem_binding_from_object(
+ layer_data *my_data, void *dispObj, const uint64_t handle,
+ const VkDebugReportObjectTypeEXT type, VkDeviceMemory *mem) {
VkBool32 skipCall = VK_FALSE;
*mem = VK_NULL_HANDLE;
- MT_OBJ_BINDING_INFO* pObjBindInfo = get_object_binding_info(my_data, handle, type);
+ MT_OBJ_BINDING_INFO *pObjBindInfo =
+ get_object_binding_info(my_data, handle, type);
if (pObjBindInfo) {
if (pObjBindInfo->mem) {
*mem = pObjBindInfo->mem;
} else {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type, handle, __LINE__, MEMTRACK_MISSING_MEM_BINDINGS, "MEM",
- "Trying to get mem binding for object %#" PRIxLEAST64 " but object has no mem binding", handle);
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type,
+ handle, __LINE__, MEMTRACK_MISSING_MEM_BINDINGS, "MEM",
+ "Trying to get mem binding for object %#" PRIxLEAST64
+ " but object has no mem binding",
+ handle);
}
} else {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type, handle, __LINE__, MEMTRACK_INVALID_OBJECT, "MEM",
- "Trying to get mem binding for object %#" PRIxLEAST64 " but no such object in %s list",
- handle, object_type_to_string(type));
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, type,
+ handle, __LINE__, MEMTRACK_INVALID_OBJECT, "MEM",
+ "Trying to get mem binding for object %#" PRIxLEAST64
+ " but no such object in %s list",
+ handle, object_type_to_string(type));
}
return skipCall;
}
// Print details of MemObjInfo list
-static void
-print_mem_list(
- layer_data *my_data,
- void *dispObj)
-{
- MT_MEM_OBJ_INFO* pInfo = NULL;
+static void print_mem_list(layer_data *my_data, void *dispObj) {
+ MT_MEM_OBJ_INFO *pInfo = NULL;
// Early out if info is not requested
if (!(my_data->report_data->active_flags & VK_DEBUG_REPORT_INFO_BIT_EXT)) {
return;
}
- // Just printing each msg individually for now, may want to package these into single large print
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- "Details of Memory Object list (of size " PRINTF_SIZE_T_SPECIFIER " elements)", my_data->memObjMap.size());
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- "=============================");
+ // Just printing each msg individually for now, may want to package these
+ // into single large print
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM",
+ "Details of Memory Object list (of size " PRINTF_SIZE_T_SPECIFIER
+ " elements)",
+ my_data->memObjMap.size());
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", "=============================");
if (my_data->memObjMap.size() <= 0)
return;
- for (auto ii=my_data->memObjMap.begin(); ii!=my_data->memObjMap.end(); ++ii) {
+ for (auto ii = my_data->memObjMap.begin(); ii != my_data->memObjMap.end();
+ ++ii) {
pInfo = &(*ii).second;
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " ===MemObjInfo at %p===", (void*)pInfo);
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " Mem object: %#" PRIxLEAST64, (uint64_t)(pInfo->mem));
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " Ref Count: %u", pInfo->refCount);
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", " ===MemObjInfo at %p===",
+ (void *)pInfo);
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", " Mem object: %#" PRIxLEAST64,
+ (uint64_t)(pInfo->mem));
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", " Ref Count: %u", pInfo->refCount);
if (0 != pInfo->allocInfo.allocationSize) {
- string pAllocInfoMsg = vk_print_vkmemoryallocateinfo(&pInfo->allocInfo, "MEM(INFO): ");
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " Mem Alloc info:\n%s", pAllocInfoMsg.c_str());
+ string pAllocInfoMsg = vk_print_vkmemoryallocateinfo(
+ &pInfo->allocInfo, "MEM(INFO): ");
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", " Mem Alloc info:\n%s",
+ pAllocInfoMsg.c_str());
} else {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " Mem Alloc info is NULL (alloc done by vkCreateSwapchainKHR())");
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", " Mem Alloc info is NULL (alloc "
+ "done by vkCreateSwapchainKHR())");
}
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " VK OBJECT Binding list of size " PRINTF_SIZE_T_SPECIFIER " elements:", pInfo->pObjBindings.size());
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM",
+ " VK OBJECT Binding list of size " PRINTF_SIZE_T_SPECIFIER
+ " elements:",
+ pInfo->pObjBindings.size());
if (pInfo->pObjBindings.size() > 0) {
- for (list<MT_OBJ_HANDLE_TYPE>::iterator it = pInfo->pObjBindings.begin(); it != pInfo->pObjBindings.end(); ++it) {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
+ for (list<MT_OBJ_HANDLE_TYPE>::iterator it =
+ pInfo->pObjBindings.begin();
+ it != pInfo->pObjBindings.end(); ++it) {
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0,
+ __LINE__, MEMTRACK_NONE, "MEM",
" VK OBJECT %" PRIu64, it->handle);
}
}
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " VK Command Buffer (CB) binding list of size " PRINTF_SIZE_T_SPECIFIER " elements", pInfo->pCommandBufferBindings.size());
- if (pInfo->pCommandBufferBindings.size() > 0)
- {
- for (list<VkCommandBuffer>::iterator it = pInfo->pCommandBufferBindings.begin(); it != pInfo->pCommandBufferBindings.end(); ++it) {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " VK CB %p", (*it));
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM",
+ " VK Command Buffer (CB) binding list of "
+ "size " PRINTF_SIZE_T_SPECIFIER " elements",
+ pInfo->pCommandBufferBindings.size());
+ if (pInfo->pCommandBufferBindings.size() > 0) {
+ for (list<VkCommandBuffer>::iterator it =
+ pInfo->pCommandBufferBindings.begin();
+ it != pInfo->pCommandBufferBindings.end(); ++it) {
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0,
+ __LINE__, MEMTRACK_NONE, "MEM", " VK CB %p",
+ (*it));
}
}
}
}
-static void
-printCBList(
- layer_data *my_data,
- void *dispObj)
-{
- MT_CB_INFO* pCBInfo = NULL;
+static void printCBList(layer_data *my_data, void *dispObj) {
+ MT_CB_INFO *pCBInfo = NULL;
// Early out if info is not requested
if (!(my_data->report_data->active_flags & VK_DEBUG_REPORT_INFO_BIT_EXT)) {
return;
}
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- "Details of CB list (of size " PRINTF_SIZE_T_SPECIFIER " elements)", my_data->cbMap.size());
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- "==================");
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM",
+ "Details of CB list (of size " PRINTF_SIZE_T_SPECIFIER " elements)",
+ my_data->cbMap.size());
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", "==================");
if (my_data->cbMap.size() <= 0)
return;
- for (auto ii=my_data->cbMap.begin(); ii!=my_data->cbMap.end(); ++ii) {
+ for (auto ii = my_data->cbMap.begin(); ii != my_data->cbMap.end(); ++ii) {
pCBInfo = &(*ii).second;
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " CB Info (%p) has CB %p, fenceId %" PRIx64", and fence %#" PRIxLEAST64,
- (void*)pCBInfo, (void*)pCBInfo->commandBuffer, pCBInfo->fenceId,
- (uint64_t) pCBInfo->lastSubmittedFence);
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM",
+ " CB Info (%p) has CB %p, fenceId %" PRIx64
+ ", and fence %#" PRIxLEAST64,
+ (void *)pCBInfo, (void *)pCBInfo->commandBuffer,
+ pCBInfo->fenceId, (uint64_t)pCBInfo->lastSubmittedFence);
if (pCBInfo->pMemObjList.size() <= 0)
continue;
- for (list<VkDeviceMemory>::iterator it = pCBInfo->pMemObjList.begin(); it != pCBInfo->pMemObjList.end(); ++it) {
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__, MEMTRACK_NONE, "MEM",
- " Mem obj %" PRIu64, (uint64_t)(*it));
+ for (list<VkDeviceMemory>::iterator it = pCBInfo->pMemObjList.begin();
+ it != pCBInfo->pMemObjList.end(); ++it) {
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, 0, __LINE__,
+ MEMTRACK_NONE, "MEM", " Mem obj %" PRIu64,
+ (uint64_t)(*it));
}
}
}
-static void
-init_mem_tracker(
- layer_data *my_data,
- const VkAllocationCallbacks *pAllocator)
-{
+static void init_mem_tracker(layer_data *my_data,
+ const VkAllocationCallbacks *pAllocator) {
uint32_t report_flags = 0;
uint32_t debug_action = 0;
FILE *log_output = NULL;
@@ -1054,10 +1080,9 @@
VkDebugReportCallbackEXT callback;
// initialize MemTracker options
report_flags = getLayerOptionFlags("MemTrackerReportFlags", 0);
- getLayerOptionEnum("MemTrackerDebugAction", (uint32_t *) &debug_action);
+ getLayerOptionEnum("MemTrackerDebugAction", (uint32_t *)&debug_action);
- if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
- {
+ if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
option_str = getLayerOption("MemTrackerLogFilename");
log_output = getLayerLogOutput(option_str, "MemTracker");
VkDebugReportCallbackCreateInfoEXT dbgInfo;
@@ -1066,7 +1091,8 @@
dbgInfo.pfnCallback = log_callback;
dbgInfo.pUserData = log_output;
dbgInfo.flags = report_flags;
- layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator, &callback);
+ layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator,
+ &callback);
my_data->logging_callback.push_back(callback);
}
@@ -1077,12 +1103,12 @@
dbgInfo.pfnCallback = win32_debug_output_msg;
dbgInfo.pUserData = log_output;
dbgInfo.flags = report_flags;
- layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator, &callback);
+ layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator,
+ &callback);
my_data->logging_callback.push_back(callback);
}
- if (!globalLockInitialized)
- {
+ if (!globalLockInitialized) {
loader_platform_thread_create_mutex(&globalLock);
globalLockInitialized = 1;
}
@@ -1092,10 +1118,9 @@
}
// hook DestroyInstance to remove tableInstanceMap entry
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks *pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyInstance(VkInstance instance,
+ const VkAllocationCallbacks *pAllocator) {
// Grab the key before the instance is destroyed.
dispatch_key key = get_dispatch_key(instance);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
@@ -1119,16 +1144,18 @@
}
}
-VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkInstance *pInstance) {
+ VkLayerInstanceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkCreateInstance fpCreateInstance =
+ (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if (fpCreateInstance == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -1141,15 +1168,16 @@
return result;
}
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
my_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
- layer_init_instance_dispatch_table(*pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
+ layer_init_instance_dispatch_table(
+ *pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
my_data->report_data = debug_report_create_instance(
- my_data->instance_dispatch_table,
- *pInstance,
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ my_data->instance_dispatch_table, *pInstance,
+ pCreateInfo->enabledExtensionCount,
+ pCreateInfo->ppEnabledExtensionNames);
init_mem_tracker(my_data, pAllocator);
@@ -1157,37 +1185,43 @@
}
static void
-createDeviceRegisterExtensions(
- const VkDeviceCreateInfo *pCreateInfo,
- VkDevice device)
-{
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo,
+ VkDevice device) {
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkLayerDispatchTable *pDisp = my_device_data->device_dispatch_table;
PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
- pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
- pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
- pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
- pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
- pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
+ pDisp->CreateSwapchainKHR =
+ (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
+ pDisp->DestroySwapchainKHR =
+ (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
+ pDisp->GetSwapchainImagesKHR =
+ (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
+ pDisp->AcquireNextImageKHR =
+ (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
+ pDisp->QueuePresentKHR =
+ (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
my_device_data->wsi_enabled = VK_FALSE;
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
my_device_data->wsi_enabled = true;
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
- VkPhysicalDevice gpu,
- const VkDeviceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDevice *pDevice)
-{
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDevice(VkPhysicalDevice gpu, const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if (fpCreateDevice == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -1200,45 +1234,59 @@
return result;
}
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
+ layer_data *my_instance_data =
+ get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
// Setup device dispatch table
my_device_data->device_dispatch_table = new VkLayerDispatchTable;
- layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
+ layer_init_device_dispatch_table(
+ *pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
- my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
+ my_device_data->report_data = layer_debug_report_create_device(
+ my_instance_data->report_data, *pDevice);
createDeviceRegisterExtensions(pCreateInfo, *pDevice);
- my_instance_data->instance_dispatch_table->GetPhysicalDeviceProperties(gpu, &my_device_data->properties);
+ my_instance_data->instance_dispatch_table->GetPhysicalDeviceProperties(
+ gpu, &my_device_data->properties);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks *pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
dispatch_key key = get_dispatch_key(device);
layer_data *my_device_data = get_my_data_ptr(key, layer_data_map);
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
- log_msg(my_device_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, (uint64_t)device, __LINE__, MEMTRACK_NONE, "MEM",
- "Printing List details prior to vkDestroyDevice()");
- log_msg(my_device_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, (uint64_t)device, __LINE__, MEMTRACK_NONE, "MEM",
- "================================================");
+ log_msg(my_device_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, (uint64_t)device, __LINE__,
+ MEMTRACK_NONE, "MEM",
+ "Printing List details prior to vkDestroyDevice()");
+ log_msg(my_device_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, (uint64_t)device, __LINE__,
+ MEMTRACK_NONE, "MEM",
+ "================================================");
print_mem_list(my_device_data, device);
printCBList(my_device_data, device);
skipCall = delete_cmd_buf_info_list(my_device_data);
// Report any memory leaks
- MT_MEM_OBJ_INFO* pInfo = NULL;
+ MT_MEM_OBJ_INFO *pInfo = NULL;
if (my_device_data->memObjMap.size() > 0) {
- for (auto ii=my_device_data->memObjMap.begin(); ii!=my_device_data->memObjMap.end(); ++ii) {
+ for (auto ii = my_device_data->memObjMap.begin();
+ ii != my_device_data->memObjMap.end(); ++ii) {
pInfo = &(*ii).second;
if (pInfo->allocInfo.allocationSize != 0) {
- // Valid Usage: All child objects created on device must have been destroyed prior to destroying device
- skipCall |= log_msg(my_device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t) pInfo->mem, __LINE__, MEMTRACK_MEMORY_LEAK, "MEM",
- "Mem Object %" PRIu64 " has not been freed. You should clean up this memory by calling "
- "vkFreeMemory(%" PRIu64 ") prior to vkDestroyDevice().", (uint64_t)(pInfo->mem), (uint64_t)(pInfo->mem));
+ // Valid Usage: All child objects created on device must have
+ // been destroyed prior to destroying device
+ skipCall |= log_msg(
+ my_device_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)pInfo->mem, __LINE__, MEMTRACK_MEMORY_LEAK, "MEM",
+ "Mem Object %" PRIu64 " has not been freed. You should "
+ "clean up this memory by calling "
+ "vkFreeMemory(%" PRIu64 ") prior to vkDestroyDevice().",
+ (uint64_t)(pInfo->mem), (uint64_t)(pInfo->mem));
}
}
}
@@ -1250,7 +1298,7 @@
#if DISPATCH_MAP_DEBUG
fprintf(stderr, "Device: %p, key: %p\n", device, key);
#endif
- VkLayerDispatchTable *pDisp = my_device_data->device_dispatch_table;
+ VkLayerDispatchTable *pDisp = my_device_data->device_dispatch_table;
if (VK_FALSE == skipCall) {
pDisp->DestroyDevice(device, pAllocator);
}
@@ -1259,58 +1307,52 @@
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties *pMemoryProperties)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- VkLayerInstanceDispatchTable *pInstanceTable = my_data->instance_dispatch_table;
- pInstanceTable->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
- memcpy(&memProps, pMemoryProperties, sizeof(VkPhysicalDeviceMemoryProperties));
+ VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ VkLayerInstanceDispatchTable *pInstanceTable =
+ my_data->instance_dispatch_table;
+ pInstanceTable->GetPhysicalDeviceMemoryProperties(physicalDevice,
+ pMemoryProperties);
+ memcpy(&memProps, pMemoryProperties,
+ sizeof(VkPhysicalDeviceMemoryProperties));
}
static const VkExtensionProperties instance_extensions[] = {
- {
- VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
- VK_EXT_DEBUG_REPORT_SPEC_VERSION
- }
-};
+ {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
- const char *pLayerName,
- uint32_t *pCount,
- VkExtensionProperties *pProperties)
-{
- return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
+ return util_GetExtensionProperties(1, instance_extensions, pCount,
+ pProperties);
}
-static const VkLayerProperties mtGlobalLayers[] = {
- {
- "VK_LAYER_LUNARG_mem_tracker",
- VK_API_VERSION,
- VK_MAKE_VERSION(0, 1, 0),
- "Validation layer: mem_tracker",
- }
-};
+static const VkLayerProperties mtGlobalLayers[] = {{
+ "VK_LAYER_LUNARG_mem_tracker", VK_API_VERSION, VK_MAKE_VERSION(0, 1, 0),
+ "Validation layer: mem_tracker",
+}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
- uint32_t *pCount,
- VkLayerProperties *pProperties)
-{
- return util_GetLayerProperties(ARRAY_SIZE(mtGlobalLayers),
- mtGlobalLayers,
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+ VkLayerProperties *pProperties) {
+ return util_GetLayerProperties(ARRAY_SIZE(mtGlobalLayers), mtGlobalLayers,
pCount, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char *pLayerName,
- uint32_t *pCount,
- VkExtensionProperties *pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+ const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
/* Mem tracker does not have any physical device extensions */
if (pLayerName == NULL) {
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- VkLayerInstanceDispatchTable *pInstanceTable = my_data->instance_dispatch_table;
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ VkLayerInstanceDispatchTable *pInstanceTable =
+ my_data->instance_dispatch_table;
return pInstanceTable->EnumerateDeviceExtensionProperties(
physicalDevice, NULL, pCount, pProperties);
} else {
@@ -1318,42 +1360,38 @@
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t *pCount,
- VkLayerProperties *pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
+ uint32_t *pCount,
+ VkLayerProperties *pProperties) {
/* Mem tracker's physical device layers are the same as global */
return util_GetLayerProperties(ARRAY_SIZE(mtGlobalLayers), mtGlobalLayers,
pCount, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
- VkDevice device,
- uint32_t queueNodeIndex,
- uint32_t queueIndex,
- VkQueue *pQueue)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- my_data->device_dispatch_table->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetDeviceQueue(VkDevice device, uint32_t queueNodeIndex,
+ uint32_t queueIndex, VkQueue *pQueue) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ my_data->device_dispatch_table->GetDeviceQueue(device, queueNodeIndex,
+ queueIndex, pQueue);
loader_platform_thread_lock_mutex(&globalLock);
add_queue_info(my_data, *pQueue);
loader_platform_thread_unlock_mutex(&globalLock);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo *pSubmits,
- VkFence fence)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueueSubmit(VkQueue queue, uint32_t submitCount,
+ const VkSubmitInfo *pSubmits, VkFence fence) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
loader_platform_thread_lock_mutex(&globalLock);
// TODO : Need to track fence and clear mem references when fence clears
- MT_CB_INFO* pCBInfo = NULL;
- uint64_t fenceId = 0;
+ MT_CB_INFO *pCBInfo = NULL;
+ uint64_t fenceId = 0;
VkBool32 skipCall = add_fence_info(my_data, fence, queue, &fenceId);
print_mem_list(my_data, queue);
@@ -1366,7 +1404,7 @@
pCBInfo->fenceId = fenceId;
pCBInfo->lastSubmittedFence = fence;
pCBInfo->lastSubmittedQueue = queue;
- for (auto& function : pCBInfo->validate_functions) {
+ for (auto &function : pCBInfo->validate_functions) {
skipCall |= function();
}
}
@@ -1375,11 +1413,16 @@
for (uint32_t i = 0; i < submit->waitSemaphoreCount; i++) {
VkSemaphore sem = submit->pWaitSemaphores[i];
- if (my_data->semaphoreMap.find(sem) != my_data->semaphoreMap.end()) {
- if (my_data->semaphoreMap[sem] != MEMTRACK_SEMAPHORE_STATE_SIGNALLED) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, (uint64_t) sem,
- __LINE__, MEMTRACK_NONE, "SEMAPHORE",
- "vkQueueSubmit: Semaphore must be in signaled state before passing to pWaitSemaphores");
+ if (my_data->semaphoreMap.find(sem) !=
+ my_data->semaphoreMap.end()) {
+ if (my_data->semaphoreMap[sem] !=
+ MEMTRACK_SEMAPHORE_STATE_SIGNALLED) {
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ (uint64_t)sem, __LINE__, MEMTRACK_NONE, "SEMAPHORE",
+ "vkQueueSubmit: Semaphore must be in signaled state "
+ "before passing to pWaitSemaphores");
}
my_data->semaphoreMap[sem] = MEMTRACK_SEMAPHORE_STATE_WAIT;
}
@@ -1387,11 +1430,16 @@
for (uint32_t i = 0; i < submit->signalSemaphoreCount; i++) {
VkSemaphore sem = submit->pSignalSemaphores[i];
- if (my_data->semaphoreMap.find(sem) != my_data->semaphoreMap.end()) {
- if (my_data->semaphoreMap[sem] != MEMTRACK_SEMAPHORE_STATE_UNSET) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, (uint64_t) sem,
- __LINE__, MEMTRACK_NONE, "SEMAPHORE",
- "vkQueueSubmit: Semaphore must not be currently signaled or in a wait state");
+ if (my_data->semaphoreMap.find(sem) !=
+ my_data->semaphoreMap.end()) {
+ if (my_data->semaphoreMap[sem] !=
+ MEMTRACK_SEMAPHORE_STATE_UNSET) {
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+ (uint64_t)sem, __LINE__, MEMTRACK_NONE, "SEMAPHORE",
+ "vkQueueSubmit: Semaphore must not be currently "
+ "signaled or in a wait state");
}
my_data->semaphoreMap[sem] = MEMTRACK_SEMAPHORE_STATE_SIGNALLED;
}
@@ -1400,8 +1448,8 @@
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->QueueSubmit(
- queue, submitCount, pSubmits, fence);
+ result = my_data->device_dispatch_table->QueueSubmit(queue, submitCount,
+ pSubmits, fence);
}
loader_platform_thread_lock_mutex(&globalLock);
@@ -1410,7 +1458,8 @@
for (uint32_t i = 0; i < submit->waitSemaphoreCount; i++) {
VkSemaphore sem = submit->pWaitSemaphores[i];
- if (my_data->semaphoreMap.find(sem) != my_data->semaphoreMap.end()) {
+ if (my_data->semaphoreMap.find(sem) !=
+ my_data->semaphoreMap.end()) {
my_data->semaphoreMap[sem] = MEMTRACK_SEMAPHORE_STATE_UNSET;
}
}
@@ -1420,14 +1469,14 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo *pAllocateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDeviceMemory *pMemory)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDeviceMemory *pMemory) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->AllocateMemory(
+ device, pAllocateInfo, pAllocator, pMemory);
// TODO : Track allocations and overall size here
loader_platform_thread_lock_mutex(&globalLock);
add_mem_obj_info(my_data, device, *pMemory, pAllocateInfo);
@@ -1436,20 +1485,24 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeMemory(
- VkDevice device,
- VkDeviceMemory mem,
- const VkAllocationCallbacks *pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkFreeMemory(VkDevice device, VkDeviceMemory mem,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
my_data->bufferRanges.erase(mem);
my_data->imageRanges.erase(mem);
- // From spec : A memory object is freed by calling vkFreeMemory() when it is no longer needed.
- // Before freeing a memory object, an application must ensure the memory object is no longer
- // in use by the device—for example by command buffers queued for execution. The memory need
- // not yet be unbound from all images and buffers, but any further use of those images or
- // buffers (on host or device) for anything other than destroying those objects will result in
+ // From spec : A memory object is freed by calling vkFreeMemory() when it is
+ // no longer needed.
+ // Before freeing a memory object, an application must ensure the memory
+ // object is no longer
+ // in use by the device—for example by command buffers queued for execution.
+ // The memory need
+ // not yet be unbound from all images and buffers, but any further use of
+ // those images or
+ // buffers (on host or device) for anything other than destroying those
+ // objects will result in
// undefined behavior.
loader_platform_thread_lock_mutex(&globalLock);
@@ -1460,54 +1513,65 @@
my_data->device_dispatch_table->FreeMemory(device, mem, pAllocator);
}
-VkBool32
-validateMemRange(
- layer_data *my_data,
- VkDeviceMemory mem,
- VkDeviceSize offset,
- VkDeviceSize size)
-{
+VkBool32 validateMemRange(layer_data *my_data, VkDeviceMemory mem,
+ VkDeviceSize offset, VkDeviceSize size) {
VkBool32 skipCall = VK_FALSE;
if (size == 0) {
- // TODO: a size of 0 is not listed as an invalid use in the spec, should it be?
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)mem, __LINE__,
- MEMTRACK_INVALID_MAP, "MEM", "VkMapMemory: Attempting to map memory range of size zero");
+ // TODO: a size of 0 is not listed as an invalid use in the spec, should
+ // it be?
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MAP, "MEM",
+ "VkMapMemory: Attempting to map memory range of size zero");
}
auto mem_element = my_data->memObjMap.find(mem);
if (mem_element != my_data->memObjMap.end()) {
- // It is an application error to call VkMapMemory on an object that is already mapped
+ // It is an application error to call VkMapMemory on an object that is
+ // already mapped
if (mem_element->second.memRange.size != 0) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)mem, __LINE__,
- MEMTRACK_INVALID_MAP, "MEM", "VkMapMemory: Attempting to map memory on an already-mapped object %#" PRIxLEAST64, (uint64_t)mem);
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MAP, "MEM",
+ "VkMapMemory: Attempting to map memory on an "
+ "already-mapped object %#" PRIxLEAST64,
+ (uint64_t)mem);
}
// Validate that offset + size is within object's allocationSize
if (size == VK_WHOLE_SIZE) {
if (offset >= mem_element->second.allocInfo.allocationSize) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)mem, __LINE__,
- MEMTRACK_INVALID_MAP, "MEM", "Mapping Memory from %" PRIu64 " to %" PRIu64 " with total array size %" PRIu64,
- offset, mem_element->second.allocInfo.allocationSize, mem_element->second.allocInfo.allocationSize);
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MAP, "MEM",
+ "Mapping Memory from %" PRIu64 " to %" PRIu64
+ " with total array size %" PRIu64,
+ offset, mem_element->second.allocInfo.allocationSize,
+ mem_element->second.allocInfo.allocationSize);
}
} else {
- if ((offset + size) > mem_element->second.allocInfo.allocationSize) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)mem, __LINE__,
- MEMTRACK_INVALID_MAP, "MEM", "Mapping Memory from %" PRIu64 " to %" PRIu64 " with total array size %" PRIu64,
- offset, size + offset, mem_element->second.allocInfo.allocationSize);
+ if ((offset + size) >
+ mem_element->second.allocInfo.allocationSize) {
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MAP,
+ "MEM", "Mapping Memory from %" PRIu64 " to %" PRIu64
+ " with total array size %" PRIu64,
+ offset, size + offset,
+ mem_element->second.allocInfo.allocationSize);
}
}
}
return skipCall;
}
-void
-storeMemRanges(
- layer_data *my_data,
- VkDeviceMemory mem,
- VkDeviceSize offset,
- VkDeviceSize size)
- {
+void storeMemRanges(layer_data *my_data, VkDeviceMemory mem,
+ VkDeviceSize offset, VkDeviceSize size) {
auto mem_element = my_data->memObjMap.find(mem);
if (mem_element != my_data->memObjMap.end()) {
MemRange new_range;
@@ -1517,17 +1581,19 @@
}
}
-VkBool32 deleteMemRanges(
- layer_data *my_data,
- VkDeviceMemory mem)
-{
+VkBool32 deleteMemRanges(layer_data *my_data, VkDeviceMemory mem) {
VkBool32 skipCall = VK_FALSE;
auto mem_element = my_data->memObjMap.find(mem);
if (mem_element != my_data->memObjMap.end()) {
if (!mem_element->second.memRange.size) {
// Valid Usage: memory must currently be mapped
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MAP, "MEM",
- "Unmapping Memory without memory being mapped: mem obj %#" PRIxLEAST64, (uint64_t)mem);
+ skipCall =
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)mem, __LINE__, MEMTRACK_INVALID_MAP, "MEM",
+ "Unmapping Memory without memory being mapped: mem obj "
+ "%#" PRIxLEAST64,
+ (uint64_t)mem);
}
mem_element->second.memRange.size = 0;
if (mem_element->second.pData) {
@@ -1540,18 +1606,14 @@
static char NoncoherentMemoryFillValue = 0xb;
-void
-initializeAndTrackMemory(
- layer_data *my_data,
- VkDeviceMemory mem,
- VkDeviceSize size,
- void **ppData)
-{
+void initializeAndTrackMemory(layer_data *my_data, VkDeviceMemory mem,
+ VkDeviceSize size, void **ppData) {
auto mem_element = my_data->memObjMap.find(mem);
if (mem_element != my_data->memObjMap.end()) {
mem_element->second.pDriverData = *ppData;
uint32_t index = mem_element->second.allocInfo.memoryTypeIndex;
- if (memProps.memoryTypes[index].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
+ if (memProps.memoryTypes[index].propertyFlags &
+ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
mem_element->second.pData = 0;
} else {
if (size == VK_WHOLE_SIZE) {
@@ -1559,50 +1621,53 @@
}
size_t convSize = (size_t)(size);
mem_element->second.pData = malloc(2 * convSize);
- memset(mem_element->second.pData, NoncoherentMemoryFillValue, 2 * convSize);
- *ppData = static_cast<char*>(mem_element->second.pData) + (convSize / 2);
+ memset(mem_element->second.pData, NoncoherentMemoryFillValue,
+ 2 * convSize);
+ *ppData =
+ static_cast<char *>(mem_element->second.pData) + (convSize / 2);
}
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
- VkDevice device,
- VkDeviceMemory mem,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkFlags flags,
- void **ppData)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkBool32 skipCall = VK_FALSE;
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
+ VkDeviceSize size, VkFlags flags, void **ppData) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkBool32 skipCall = VK_FALSE;
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
loader_platform_thread_lock_mutex(&globalLock);
MT_MEM_OBJ_INFO *pMemObj = get_mem_obj_info(my_data, mem);
if (pMemObj) {
pMemObj->valid = true;
- if ((memProps.memoryTypes[pMemObj->allocInfo.memoryTypeIndex].propertyFlags &
+ if ((memProps.memoryTypes[pMemObj->allocInfo.memoryTypeIndex]
+ .propertyFlags &
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- (uint64_t) mem, __LINE__, MEMTRACK_INVALID_STATE, "MEM",
- "Mapping Memory without VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT set: mem obj %#" PRIxLEAST64, (uint64_t) mem);
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)mem,
+ __LINE__, MEMTRACK_INVALID_STATE, "MEM",
+ "Mapping Memory without VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT "
+ "set: mem obj %#" PRIxLEAST64,
+ (uint64_t)mem);
}
}
skipCall |= validateMemRange(my_data, mem, offset, size);
storeMemRanges(my_data, mem, offset, size);
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->MapMemory(device, mem, offset, size, flags, ppData);
+ result = my_data->device_dispatch_table->MapMemory(device, mem, offset,
+ size, flags, ppData);
initializeAndTrackMemory(my_data, mem, size, ppData);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(
- VkDevice device,
- VkDeviceMemory mem)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkBool32 skipCall = VK_FALSE;
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkUnmapMemory(VkDevice device, VkDeviceMemory mem) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
skipCall |= deleteMemRanges(my_data, mem);
@@ -1612,98 +1677,112 @@
}
}
-VkBool32
-validateMemoryIsMapped(
- layer_data *my_data,
- uint32_t memRangeCount,
- const VkMappedMemoryRange *pMemRanges)
-{
+VkBool32 validateMemoryIsMapped(layer_data *my_data, uint32_t memRangeCount,
+ const VkMappedMemoryRange *pMemRanges) {
VkBool32 skipCall = VK_FALSE;
for (uint32_t i = 0; i < memRangeCount; ++i) {
auto mem_element = my_data->memObjMap.find(pMemRanges[i].memory);
if (mem_element != my_data->memObjMap.end()) {
if (mem_element->second.memRange.offset > pMemRanges[i].offset ||
- (mem_element->second.memRange.offset + mem_element->second.memRange.size) < (pMemRanges[i].offset + pMemRanges[i].size)) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)pMemRanges[i].memory,
- __LINE__, MEMTRACK_INVALID_MAP, "MEM", "Memory must be mapped before it can be flushed or invalidated.");
+ (mem_element->second.memRange.offset +
+ mem_element->second.memRange.size) <
+ (pMemRanges[i].offset + pMemRanges[i].size)) {
+ skipCall |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)pMemRanges[i].memory, __LINE__,
+ MEMTRACK_INVALID_MAP, "MEM",
+ "Memory must be mapped before it can be flushed or "
+ "invalidated.");
}
}
}
return skipCall;
}
-VkBool32
-validateAndCopyNoncoherentMemoryToDriver(
- layer_data *my_data,
- uint32_t memRangeCount,
- const VkMappedMemoryRange *pMemRanges)
-{
+VkBool32 validateAndCopyNoncoherentMemoryToDriver(
+ layer_data *my_data, uint32_t memRangeCount,
+ const VkMappedMemoryRange *pMemRanges) {
VkBool32 skipCall = VK_FALSE;
for (uint32_t i = 0; i < memRangeCount; ++i) {
auto mem_element = my_data->memObjMap.find(pMemRanges[i].memory);
if (mem_element != my_data->memObjMap.end()) {
if (mem_element->second.pData) {
- VkDeviceSize size = mem_element->second.memRange.size;
+ VkDeviceSize size = mem_element->second.memRange.size;
VkDeviceSize half_size = (size / 2);
- char* data = static_cast<char*>(mem_element->second.pData);
+ char *data = static_cast<char *>(mem_element->second.pData);
for (auto j = 0; j < half_size; ++j) {
if (data[j] != NoncoherentMemoryFillValue) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)pMemRanges[i].memory,
- __LINE__, MEMTRACK_INVALID_MAP, "MEM", "Memory overflow was detected on mem obj %" PRIxLEAST64, (uint64_t)pMemRanges[i].memory);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)pMemRanges[i].memory, __LINE__,
+ MEMTRACK_INVALID_MAP, "MEM", "Memory overflow was "
+ "detected on mem obj "
+ "%" PRIxLEAST64,
+ (uint64_t)pMemRanges[i].memory);
}
}
for (auto j = size + half_size; j < 2 * size; ++j) {
if (data[j] != NoncoherentMemoryFillValue) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)pMemRanges[i].memory,
- __LINE__, MEMTRACK_INVALID_MAP, "MEM", "Memory overflow was detected on mem obj %" PRIxLEAST64, (uint64_t)pMemRanges[i].memory);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ (uint64_t)pMemRanges[i].memory, __LINE__,
+ MEMTRACK_INVALID_MAP, "MEM", "Memory overflow was "
+ "detected on mem obj "
+ "%" PRIxLEAST64,
+ (uint64_t)pMemRanges[i].memory);
}
}
- memcpy(mem_element->second.pDriverData, static_cast<void*>(data + (size_t)(half_size)), (size_t)(size));
+ memcpy(mem_element->second.pDriverData,
+ static_cast<void *>(data + (size_t)(half_size)),
+ (size_t)(size));
}
}
}
return skipCall;
}
-VK_LAYER_EXPORT VkResult VKAPI_CALL vkFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memRangeCount,
- const VkMappedMemoryRange *pMemRanges)
-{
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VkResult VKAPI_CALL
+ vkFlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
+ const VkMappedMemoryRange *pMemRanges) {
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- skipCall |= validateAndCopyNoncoherentMemoryToDriver(my_data, memRangeCount, pMemRanges);
- skipCall |= validateMemoryIsMapped(my_data, memRangeCount, pMemRanges);
- if (VK_FALSE == skipCall ) {
- result = my_data->device_dispatch_table->FlushMappedMemoryRanges(device, memRangeCount, pMemRanges);
+ skipCall |= validateAndCopyNoncoherentMemoryToDriver(my_data, memRangeCount,
+ pMemRanges);
+ skipCall |= validateMemoryIsMapped(my_data, memRangeCount, pMemRanges);
+ if (VK_FALSE == skipCall) {
+ result = my_data->device_dispatch_table->FlushMappedMemoryRanges(
+ device, memRangeCount, pMemRanges);
}
return result;
}
-VK_LAYER_EXPORT VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memRangeCount,
- const VkMappedMemoryRange *pMemRanges)
-{
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VkResult VKAPI_CALL
+ vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount,
+ const VkMappedMemoryRange *pMemRanges) {
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
skipCall |= validateMemoryIsMapped(my_data, memRangeCount, pMemRanges);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->InvalidateMappedMemoryRanges(device, memRangeCount, pMemRanges);
+ result = my_data->device_dispatch_table->InvalidateMappedMemoryRanges(
+ device, memRangeCount, pMemRanges);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyFence(
- VkDevice device,
- VkFence fence,
- const VkAllocationCallbacks *pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyFence(VkDevice device, VkFence fence,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
loader_platform_thread_lock_mutex(&globalLock);
delete_fence_info(my_data, fence);
auto item = my_data->fenceMap.find(fence);
@@ -1714,36 +1793,37 @@
my_data->device_dispatch_table->DestroyFence(device, fence, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(
- VkDevice device,
- VkBuffer buffer,
- const VkAllocationCallbacks *pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyBuffer(VkDevice device, VkBuffer buffer,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
auto item = my_data->bufferMap.find((uint64_t)buffer);
if (item != my_data->bufferMap.end()) {
- skipCall = clear_object_binding(my_data, device, (uint64_t)buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
+ skipCall = clear_object_binding(my_data, device, (uint64_t)buffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
my_data->bufferMap.erase(item);
}
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->DestroyBuffer(device, buffer, pAllocator);
+ my_data->device_dispatch_table->DestroyBuffer(device, buffer,
+ pAllocator);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyImage(
- VkDevice device,
- VkImage image,
- const VkAllocationCallbacks *pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyImage(VkDevice device, VkImage image,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
auto item = my_data->imageMap.find((uint64_t)image);
if (item != my_data->imageMap.end()) {
- skipCall = clear_object_binding(my_data, device, (uint64_t)image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
+ skipCall = clear_object_binding(my_data, device, (uint64_t)image,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
my_data->imageMap.erase(item);
}
loader_platform_thread_unlock_mutex(&globalLock);
@@ -1752,38 +1832,53 @@
}
}
-VkBool32 print_memory_range_error(layer_data *my_data, const uint64_t object_handle, const uint64_t other_handle, VkDebugReportObjectTypeEXT object_type) {
+VkBool32 print_memory_range_error(layer_data *my_data,
+ const uint64_t object_handle,
+ const uint64_t other_handle,
+ VkDebugReportObjectTypeEXT object_type) {
if (object_type == VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT) {
- return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, 0, MEMTRACK_INVALID_ALIASING, "MEM",
- "Buffer %" PRIx64 " is alised with image %" PRIx64, object_handle, other_handle);
+ return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ object_type, object_handle, 0, MEMTRACK_INVALID_ALIASING,
+ "MEM",
+                   "Buffer %" PRIx64 " is aliased with image %" PRIx64,
+ object_handle, other_handle);
} else {
- return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, object_type, object_handle, 0, MEMTRACK_INVALID_ALIASING, "MEM",
- "Image %" PRIx64 " is alised with buffer %" PRIx64, object_handle, other_handle);
+ return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ object_type, object_handle, 0, MEMTRACK_INVALID_ALIASING,
+ "MEM",
+                   "Image %" PRIx64 " is aliased with buffer %" PRIx64,
+ object_handle, other_handle);
}
}
-VkBool32 validate_memory_range(layer_data *my_data, const unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>>& memory, const MEMORY_RANGE& new_range, VkDebugReportObjectTypeEXT object_type) {
+VkBool32 validate_memory_range(
+ layer_data *my_data,
+ const unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>> &memory,
+ const MEMORY_RANGE &new_range, VkDebugReportObjectTypeEXT object_type) {
VkBool32 skip_call = false;
- if (!memory.count(new_range.memory)) return false;
- const vector<MEMORY_RANGE>& ranges = memory.at(new_range.memory);
+ if (!memory.count(new_range.memory))
+ return false;
+ const vector<MEMORY_RANGE> &ranges = memory.at(new_range.memory);
for (auto range : ranges) {
- if ((range.end & ~(my_data->properties.limits.bufferImageGranularity - 1)) < new_range.start) continue;
- if (range.start > (new_range.end & ~(my_data->properties.limits.bufferImageGranularity - 1))) continue;
- skip_call |= print_memory_range_error(my_data, new_range.handle, range.handle, object_type);
+ if ((range.end & ~(my_data->properties.limits.bufferImageGranularity -
+ 1)) < new_range.start)
+ continue;
+ if (range.start >
+ (new_range.end &
+ ~(my_data->properties.limits.bufferImageGranularity - 1)))
+ continue;
+ skip_call |= print_memory_range_error(my_data, new_range.handle,
+ range.handle, object_type);
}
return skip_call;
}
VkBool32 validate_buffer_image_aliasing(
- layer_data *my_data,
- uint64_t handle,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset,
- VkMemoryRequirements memRequirements,
- unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>>& ranges,
- const unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>>& other_ranges,
- VkDebugReportObjectTypeEXT object_type)
-{
+ layer_data *my_data, uint64_t handle, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset, VkMemoryRequirements memRequirements,
+ unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>> &ranges,
+ const unordered_map<VkDeviceMemory, vector<MEMORY_RANGE>> &other_ranges,
+ VkDebugReportObjectTypeEXT object_type) {
MEMORY_RANGE range;
range.handle = handle;
range.memory = mem;
@@ -1793,87 +1888,97 @@
return validate_memory_range(my_data, other_ranges, range, object_type);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
loader_platform_thread_lock_mutex(&globalLock);
// Track objects tied to memory
uint64_t buffer_handle = (uint64_t)(buffer);
- VkBool32 skipCall = set_mem_binding(my_data, device, mem, buffer_handle, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, "vkBindBufferMemory");
- add_object_binding_info(my_data, buffer_handle, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, mem);
+ VkBool32 skipCall = set_mem_binding(my_data, device, mem, buffer_handle,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ "vkBindBufferMemory");
+ add_object_binding_info(my_data, buffer_handle,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, mem);
{
VkMemoryRequirements memRequirements;
vkGetBufferMemoryRequirements(device, buffer, &memRequirements);
- skipCall |= validate_buffer_image_aliasing(my_data, buffer_handle, mem, memoryOffset, memRequirements, my_data->bufferRanges, my_data->imageRanges, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
+ skipCall |= validate_buffer_image_aliasing(
+ my_data, buffer_handle, mem, memoryOffset, memRequirements,
+ my_data->bufferRanges, my_data->imageRanges,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT);
}
print_mem_list(my_data, device);
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->BindBufferMemory(device, buffer, mem, memoryOffset);
+ result = my_data->device_dispatch_table->BindBufferMemory(
+ device, buffer, mem, memoryOffset);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
loader_platform_thread_lock_mutex(&globalLock);
// Track objects tied to memory
uint64_t image_handle = (uint64_t)(image);
- VkBool32 skipCall = set_mem_binding(my_data, device, mem, image_handle, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, "vkBindImageMemory");
- add_object_binding_info(my_data, image_handle, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, mem);
+ VkBool32 skipCall = set_mem_binding(my_data, device, mem, image_handle,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ "vkBindImageMemory");
+ add_object_binding_info(my_data, image_handle,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, mem);
{
VkMemoryRequirements memRequirements;
vkGetImageMemoryRequirements(device, image, &memRequirements);
- skipCall |= validate_buffer_image_aliasing(my_data, image_handle, mem, memoryOffset, memRequirements, my_data->imageRanges, my_data->bufferRanges, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
+ skipCall |= validate_buffer_image_aliasing(
+ my_data, image_handle, mem, memoryOffset, memRequirements,
+ my_data->imageRanges, my_data->bufferRanges,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT);
}
print_mem_list(my_data, device);
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->BindImageMemory(device, image, mem, memoryOffset);
+ result = my_data->device_dispatch_table->BindImageMemory(
+ device, image, mem, memoryOffset);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements *pMemoryRequirements)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
+ VkMemoryRequirements *pMemoryRequirements) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
// TODO : What to track here?
- // Could potentially save returned mem requirements and validate values passed into BindBufferMemory
- my_data->device_dispatch_table->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+ // Could potentially save returned mem requirements and validate values
+ // passed into BindBufferMemory
+ my_data->device_dispatch_table->GetBufferMemoryRequirements(
+ device, buffer, pMemoryRequirements);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements *pMemoryRequirements)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetImageMemoryRequirements(VkDevice device, VkImage image,
+ VkMemoryRequirements *pMemoryRequirements) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
// TODO : What to track here?
- // Could potentially save returned mem requirements and validate values passed into BindImageMemory
- my_data->device_dispatch_table->GetImageMemoryRequirements(device, image, pMemoryRequirements);
+ // Could potentially save returned mem requirements and validate values
+ // passed into BindImageMemory
+ my_data->device_dispatch_table->GetImageMemoryRequirements(
+ device, image, pMemoryRequirements);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo *pBindInfo,
- VkFence fence)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo *pBindInfo, VkFence fence) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
VkBool32 skipCall = VK_FALSE;
@@ -1882,29 +1987,38 @@
for (uint32_t i = 0; i < bindInfoCount; i++) {
// Track objects tied to memory
for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++) {
- for (uint32_t k = 0; k < pBindInfo[i].pBufferBinds[j].bindCount; k++) {
- if (set_sparse_mem_binding(my_data, queue,
- pBindInfo[i].pBufferBinds[j].pBinds[k].memory,
- (uint64_t) pBindInfo[i].pBufferBinds[j].buffer,
- VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, "vkQueueBindSparse"))
+ for (uint32_t k = 0; k < pBindInfo[i].pBufferBinds[j].bindCount;
+ k++) {
+ if (set_sparse_mem_binding(
+ my_data, queue,
+ pBindInfo[i].pBufferBinds[j].pBinds[k].memory,
+ (uint64_t)pBindInfo[i].pBufferBinds[j].buffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ "vkQueueBindSparse"))
skipCall = VK_TRUE;
}
}
for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++) {
- for (uint32_t k = 0; k < pBindInfo[i].pImageOpaqueBinds[j].bindCount; k++) {
- if (set_sparse_mem_binding(my_data, queue,
- pBindInfo[i].pImageOpaqueBinds[j].pBinds[k].memory,
- (uint64_t) pBindInfo[i].pImageOpaqueBinds[j].image,
- VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, "vkQueueBindSparse"))
+ for (uint32_t k = 0;
+ k < pBindInfo[i].pImageOpaqueBinds[j].bindCount; k++) {
+ if (set_sparse_mem_binding(
+ my_data, queue,
+ pBindInfo[i].pImageOpaqueBinds[j].pBinds[k].memory,
+ (uint64_t)pBindInfo[i].pImageOpaqueBinds[j].image,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ "vkQueueBindSparse"))
skipCall = VK_TRUE;
}
}
for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++) {
- for (uint32_t k = 0; k < pBindInfo[i].pImageBinds[j].bindCount; k++) {
- if (set_sparse_mem_binding(my_data, queue,
- pBindInfo[i].pImageBinds[j].pBinds[k].memory,
- (uint64_t) pBindInfo[i].pImageBinds[j].image,
- VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, "vkQueueBindSparse"))
+ for (uint32_t k = 0; k < pBindInfo[i].pImageBinds[j].bindCount;
+ k++) {
+ if (set_sparse_mem_binding(
+ my_data, queue,
+ pBindInfo[i].pImageBinds[j].pBinds[k].memory,
+ (uint64_t)pBindInfo[i].pImageBinds[j].image,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ "vkQueueBindSparse"))
skipCall = VK_TRUE;
}
}
@@ -1913,22 +2027,22 @@
print_mem_list(my_data, queue);
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+ result = my_data->device_dispatch_table->QueueBindSparse(
+ queue, bindInfoCount, pBindInfo, fence);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(
- VkDevice device,
- const VkFenceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkFence *pFence)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateFence(device, pCreateInfo, pAllocator, pFence);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateFence(
+ device, pCreateInfo, pAllocator, pFence);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- MT_FENCE_INFO* pFI = &my_data->fenceMap[*pFence];
+ MT_FENCE_INFO *pFI = &my_data->fenceMap[*pFence];
memset(pFI, 0, sizeof(MT_FENCE_INFO));
memcpy(&(pFI->createInfo), pCreateInfo, sizeof(VkFenceCreateInfo));
if (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) {
@@ -1939,13 +2053,12 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence *pFences)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetFences(VkDevice device, uint32_t fenceCount,
+ const VkFence *pFences) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
@@ -1954,42 +2067,61 @@
auto fence_item = my_data->fenceMap.find(pFences[i]);
if (fence_item != my_data->fenceMap.end()) {
// Validate fences in SIGNALED state
- if (!(fence_item->second.createInfo.flags & VK_FENCE_CREATE_SIGNALED_BIT)) {
- // TODO: I don't see a Valid Usage section for ResetFences. This behavior should be documented there.
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t) pFences[i], __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
- "Fence %#" PRIxLEAST64 " submitted to VkResetFences in UNSIGNALED STATE", (uint64_t) pFences[i]);
- }
- else {
+ if (!(fence_item->second.createInfo.flags &
+ VK_FENCE_CREATE_SIGNALED_BIT)) {
+ // TODO: I don't see a Valid Usage section for ResetFences. This
+ // behavior should be documented there.
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t)pFences[i],
+ __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
+ "Fence %#" PRIxLEAST64
+ " submitted to VkResetFences in UNSIGNALED STATE",
+ (uint64_t)pFences[i]);
+ } else {
fence_item->second.createInfo.flags =
- static_cast<VkFenceCreateFlags>(fence_item->second.createInfo.flags & ~VK_FENCE_CREATE_SIGNALED_BIT);
+ static_cast<VkFenceCreateFlags>(
+ fence_item->second.createInfo.flags &
+ ~VK_FENCE_CREATE_SIGNALED_BIT);
}
}
}
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->ResetFences(device, fenceCount, pFences);
+ result = my_data->device_dispatch_table->ResetFences(device, fenceCount,
+ pFences);
}
return result;
}
-static inline VkBool32
-verifyFenceStatus(
- VkDevice device,
- VkFence fence,
- const char *apiCall)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+static inline VkBool32 verifyFenceStatus(VkDevice device, VkFence fence,
+ const char *apiCall) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skipCall = VK_FALSE;
auto pFenceInfo = my_data->fenceMap.find(fence);
if (pFenceInfo != my_data->fenceMap.end()) {
if (pFenceInfo->second.firstTimeFlag != VK_TRUE) {
- if ((pFenceInfo->second.createInfo.flags & VK_FENCE_CREATE_SIGNALED_BIT) && pFenceInfo->second.firstTimeFlag != VK_TRUE) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t) fence, __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
- "%s specified fence %#" PRIxLEAST64 " already in SIGNALED state.", apiCall, (uint64_t) fence);
+ if ((pFenceInfo->second.createInfo.flags &
+ VK_FENCE_CREATE_SIGNALED_BIT) &&
+ pFenceInfo->second.firstTimeFlag != VK_TRUE) {
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t)fence,
+ __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
+ "%s specified fence %#" PRIxLEAST64
+ " already in SIGNALED state.",
+ apiCall, (uint64_t)fence);
}
- if (!pFenceInfo->second.queue) { // Checking status of unsubmitted fence
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t) fence, __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
- "%s called for fence %#" PRIxLEAST64 " which has not been submitted on a Queue.", apiCall, (uint64_t) fence);
+ if (!pFenceInfo->second
+ .queue) { // Checking status of unsubmitted fence
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, (uint64_t)fence,
+ __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
+ "%s called for fence %#" PRIxLEAST64
+ " which has not been submitted on a Queue.",
+ apiCall, (uint64_t)fence);
}
} else {
pFenceInfo->second.firstTimeFlag = VK_FALSE;
@@ -1998,15 +2130,15 @@
return skipCall;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(
- VkDevice device,
- VkFence fence)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetFenceStatus(VkDevice device, VkFence fence) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skipCall = verifyFenceStatus(device, fence, "vkGetFenceStatus");
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = my_data->device_dispatch_table->GetFenceStatus(device, fence);
+ VkResult result =
+ my_data->device_dispatch_table->GetFenceStatus(device, fence);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
update_fence_tracking(my_data, fence);
@@ -2015,27 +2147,26 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence *pFences,
- VkBool32 waitAll,
- uint64_t timeout)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkWaitForFences(VkDevice device, uint32_t fenceCount,
+ const VkFence *pFences, VkBool32 waitAll,
+ uint64_t timeout) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skipCall = VK_FALSE;
// Verify fence status of submitted fences
- for(uint32_t i = 0; i < fenceCount; i++) {
+ for (uint32_t i = 0; i < fenceCount; i++) {
skipCall |= verifyFenceStatus(device, pFences[i], "vkWaitForFences");
}
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = my_data->device_dispatch_table->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
+ VkResult result = my_data->device_dispatch_table->WaitForFences(
+ device, fenceCount, pFences, waitAll, timeout);
loader_platform_thread_lock_mutex(&globalLock);
if (VK_SUCCESS == result) {
if (waitAll || fenceCount == 1) { // Clear all the fences
- for(uint32_t i = 0; i < fenceCount; i++) {
+ for (uint32_t i = 0; i < fenceCount; i++) {
update_fence_tracking(my_data, pFences[i]);
}
}
@@ -2044,10 +2175,9 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(
- VkQueue queue)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
VkResult result = my_data->device_dispatch_table->QueueWaitIdle(queue);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
@@ -2057,10 +2187,10 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(
- VkDevice device)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkDeviceWaitIdle(VkDevice device) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkResult result = my_data->device_dispatch_table->DeviceWaitIdle(device);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
@@ -2070,90 +2200,104 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkBuffer *pBuffer)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateBuffer(
+ device, pCreateInfo, pAllocator, pBuffer);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- add_object_create_info(my_data, (uint64_t)*pBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, pCreateInfo);
+ add_object_create_info(my_data, (uint64_t)*pBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+ pCreateInfo);
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(
- VkDevice device,
- const VkImageCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkImage *pImage)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateImage(device, pCreateInfo, pAllocator, pImage);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateImage(
+ device, pCreateInfo, pAllocator, pImage);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
- add_object_create_info(my_data, (uint64_t)*pImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, pCreateInfo);
+ add_object_create_info(my_data, (uint64_t)*pImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ pCreateInfo);
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkImageView *pView)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateImageView(device, pCreateInfo, pAllocator, pView);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkImageView *pView) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateImageView(
+ device, pCreateInfo, pAllocator, pView);
if (result == VK_SUCCESS) {
loader_platform_thread_lock_mutex(&globalLock);
my_data->imageViewMap[*pView].image = pCreateInfo->image;
// Validate that img has correct usage flags set
- validate_image_usage_flags(my_data, device, pCreateInfo->image,
- VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
- VK_FALSE, "vkCreateImageView()", "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT]_BIT");
+ validate_image_usage_flags(
+ my_data, device, pCreateInfo->image,
+ VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ VK_FALSE, "vkCreateImageView()",
+ "VK_IMAGE_USAGE_[SAMPLED|STORAGE|COLOR_ATTACHMENT]_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkBufferView *pView)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateBufferView(device, pCreateInfo, pAllocator, pView);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateBufferView(VkDevice device,
+ const VkBufferViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkBufferView *pView) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateBufferView(
+ device, pCreateInfo, pAllocator, pView);
if (result == VK_SUCCESS) {
loader_platform_thread_lock_mutex(&globalLock);
- // In order to create a valid buffer view, the buffer must have been created with at least one of the
- // following flags: UNIFORM_TEXEL_BUFFER_BIT or STORAGE_TEXEL_BUFFER_BIT
- validate_buffer_usage_flags(my_data, device, pCreateInfo->buffer,
- VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
- VK_FALSE, "vkCreateBufferView()", "VK_BUFFER_USAGE_[STORAGE|UNIFORM]_TEXEL_BUFFER_BIT");
+ // In order to create a valid buffer view, the buffer must have been
+ // created with at least one of the
+ // following flags: UNIFORM_TEXEL_BUFFER_BIT or
+ // STORAGE_TEXEL_BUFFER_BIT
+ validate_buffer_usage_flags(
+ my_data, device, pCreateInfo->buffer,
+ VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
+ VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+ VK_FALSE, "vkCreateBufferView()",
+ "VK_BUFFER_USAGE_[STORAGE|UNIFORM]_TEXEL_BUFFER_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo *pCreateInfo,
- VkCommandBuffer *pCommandBuffer)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->AllocateCommandBuffers(device, pCreateInfo, pCommandBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateCommandBuffers(VkDevice device,
+ const VkCommandBufferAllocateInfo *pCreateInfo,
+ VkCommandBuffer *pCommandBuffer) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->AllocateCommandBuffers(
+ device, pCreateInfo, pCommandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
if (VK_SUCCESS == result) {
for (uint32_t i = 0; i < pCreateInfo->commandBufferCount; i++) {
- add_cmd_buf_info(my_data, pCreateInfo->commandPool, pCommandBuffer[i]);
+ add_cmd_buf_info(my_data, pCreateInfo->commandPool,
+ pCommandBuffer[i]);
}
}
loader_platform_thread_unlock_mutex(&globalLock);
@@ -2161,35 +2305,37 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer *pCommandBuffers)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VkCommandBuffer *pCommandBuffers) {
VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
loader_platform_thread_lock_mutex(&globalLock);
for (uint32_t i = 0; i < commandBufferCount; i++) {
- skipCall |= delete_cmd_buf_info(my_data, commandPool, pCommandBuffers[i]);
+ skipCall |=
+ delete_cmd_buf_info(my_data, commandPool, pCommandBuffers[i]);
}
printCBList(my_data, device);
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->FreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+ my_data->device_dispatch_table->FreeCommandBuffers(
+ device, commandPool, commandBufferCount, pCommandBuffers);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkCommandPool *pCommandPool)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateCommandPool(VkDevice device,
+ const VkCommandPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkCommandPool *pCommandPool) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateCommandPool(
+ device, pCreateInfo, pAllocator, pCommandPool);
loader_platform_thread_lock_mutex(&globalLock);
@@ -2200,29 +2346,33 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks *pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+ const VkAllocationCallbacks *pAllocator) {
VkBool32 commandBufferComplete = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
// Verify that command buffers in pool are complete (not in-flight)
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
for (auto it = my_data->commandPoolMap[commandPool].pCommandBuffers.begin();
- it != my_data->commandPoolMap[commandPool].pCommandBuffers.end(); it++) {
+ it != my_data->commandPoolMap[commandPool].pCommandBuffers.end();
+ it++) {
commandBufferComplete = VK_FALSE;
skipCall = checkCBCompleted(my_data, *it, &commandBufferComplete);
if (VK_FALSE == commandBufferComplete) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(*it), __LINE__,
- MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM", "Destroying Command Pool 0x%" PRIxLEAST64 " before "
- "its command buffer (0x%" PRIxLEAST64 ") has completed.", (uint64_t)(commandPool),
- reinterpret_cast<uint64_t>(*it));
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(*it),
+ __LINE__, MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM",
+ "Destroying Command Pool 0x%" PRIxLEAST64 " before "
+ "its command buffer (0x%" PRIxLEAST64 ") has completed.",
+ (uint64_t)(commandPool), reinterpret_cast<uint64_t>(*it));
}
}
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->DestroyCommandPool(device, commandPool, pAllocator);
+ my_data->device_dispatch_table->DestroyCommandPool(device, commandPool,
+ pAllocator);
}
loader_platform_thread_lock_mutex(&globalLock);
@@ -2236,24 +2386,27 @@
loader_platform_thread_unlock_mutex(&globalLock);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkBool32 commandBufferComplete = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetCommandPool(VkDevice device, VkCommandPool commandPool,
+ VkCommandPoolResetFlags flags) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkBool32 commandBufferComplete = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
auto it = my_data->commandPoolMap[commandPool].pCommandBuffers.begin();
// Verify that CB's in pool are complete (not in-flight)
while (it != my_data->commandPoolMap[commandPool].pCommandBuffers.end()) {
skipCall = checkCBCompleted(my_data, (*it), &commandBufferComplete);
if (VK_FALSE == commandBufferComplete) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(*it), __LINE__,
- MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM", "Resetting CB %p before it has completed. You must check CB "
- "flag before calling vkResetCommandBuffer().", (*it));
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(*it),
+ __LINE__, MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM",
+ "Resetting CB %p before it has completed. You must check CB "
+ "flag before calling vkResetCommandBuffer().",
+ (*it));
} else {
loader_platform_thread_lock_mutex(&globalLock);
// Clear memory references at this point.
@@ -2264,33 +2417,41 @@
}
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->ResetCommandPool(device, commandPool, flags);
+ result = my_data->device_dispatch_table->ResetCommandPool(
+ device, commandPool, flags);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo *pBeginInfo)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
- VkBool32 skipCall = VK_FALSE;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
+ const VkCommandBufferBeginInfo *pBeginInfo) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+ VkBool32 skipCall = VK_FALSE;
VkBool32 commandBufferComplete = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
- // This implicitly resets the Cmd Buffer so make sure any fence is done and then clear memory references
+ // This implicitly resets the Cmd Buffer so make sure any fence is done and
+ // then clear memory references
skipCall = checkCBCompleted(my_data, commandBuffer, &commandBufferComplete);
if (VK_FALSE == commandBufferComplete) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__,
- MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM", "Calling vkBeginCommandBuffer() on active CB %p before it has completed. "
- "You must check CB flag before this call.", commandBuffer);
+ skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM",
+ "Calling vkBeginCommandBuffer() on active CB %p "
+ "before it has completed. "
+ "You must check CB flag before this call.",
+ commandBuffer);
}
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->BeginCommandBuffer(commandBuffer, pBeginInfo);
+ result = my_data->device_dispatch_table->BeginCommandBuffer(
+ commandBuffer, pBeginInfo);
}
loader_platform_thread_lock_mutex(&globalLock);
clear_cmd_buf_and_mem_references(my_data, commandBuffer);
@@ -2298,49 +2459,57 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(
- VkCommandBuffer commandBuffer)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
// TODO : Anything to do here?
- VkResult result = my_data->device_dispatch_table->EndCommandBuffer(commandBuffer);
+ VkResult result =
+ my_data->device_dispatch_table->EndCommandBuffer(commandBuffer);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
- VkBool32 skipCall = VK_FALSE;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetCommandBuffer(VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+ VkBool32 skipCall = VK_FALSE;
VkBool32 commandBufferComplete = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
// Verify that CB is complete (not in-flight)
skipCall = checkCBCompleted(my_data, commandBuffer, &commandBufferComplete);
if (VK_FALSE == commandBufferComplete) {
- skipCall |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__,
- MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM", "Resetting CB %p before it has completed. You must check CB "
- "flag before calling vkResetCommandBuffer().", commandBuffer);
+ skipCall |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__,
+ MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, "MEM",
+ "Resetting CB %p before it has completed. You must check CB "
+ "flag before calling vkResetCommandBuffer().",
+ commandBuffer);
}
    // Clear memory references at this point.
skipCall |= clear_cmd_buf_and_mem_references(my_data, commandBuffer);
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->ResetCommandBuffer(commandBuffer, flags);
+ result = my_data->device_dispatch_table->ResetCommandBuffer(
+ commandBuffer, flags);
}
return result;
}
-// TODO : For any vkCmdBind* calls that include an object which has mem bound to it,
+// TODO : For any vkCmdBind* calls that include an object which has mem bound to
+// it,
// need to account for that mem now having binding to given commandBuffer
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindPipeline(VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
#if 0
// TODO : If memory bound to pipeline, then need to tie that mem to commandBuffer
if (getPipeline(pipeline)) {
@@ -2354,540 +2523,662 @@
layerCbMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, pipeline, __LINE__, MEMTRACK_INVALID_OBJECT, (char *) "DS", (char *) str);
}
#endif
- my_data->device_dispatch_table->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+ my_data->device_dispatch_table->CmdBindPipeline(
+ commandBuffer, pipelineBindPoint, pipeline);
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t setCount,
- const VkDescriptorSet *pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t *pDynamicOffsets)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- // TODO : Somewhere need to verify that all textures referenced by shaders in DS are in some type of *SHADER_READ* state
+ VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+ const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
+ const uint32_t *pDynamicOffsets) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ // TODO : Somewhere need to verify that all textures referenced by shaders
+ // in DS are in some type of *SHADER_READ* state
my_data->device_dispatch_table->CmdBindDescriptorSets(
- commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+ commandBuffer, pipelineBindPoint, layout, firstSet, setCount,
+ pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer *pBuffers,
- const VkDeviceSize *pOffsets)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
+ uint32_t bindingCount, const VkBuffer *pBuffers,
+ const VkDeviceSize *pOffsets) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkBool32 skip_call = false;
for (uint32_t i = 0; i < bindingCount; ++i) {
VkDeviceMemory mem;
- skip_call |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)(pBuffers[i]),
+ skip_call |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)(pBuffers[i]),
VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
auto cb_data = my_data->cbMap.find(commandBuffer);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdBindVertexBuffers()"); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(my_data, mem,
+ "vkCmdBindVertexBuffers()");
+ };
cb_data->second.validate_functions.push_back(function);
}
}
// TODO : Somewhere need to verify that VBs have correct usage state flagged
if (!skip_call)
- my_data->device_dispatch_table->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+ my_data->device_dispatch_table->CmdBindVertexBuffers(
+ commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, VkIndexType indexType) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skip_call = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)(buffer), VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ VkBool32 skip_call = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)(buffer),
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
auto cb_data = my_data->cbMap.find(commandBuffer);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdBindIndexBuffer()"); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(my_data, mem,
+ "vkCmdBindIndexBuffer()");
+ };
cb_data->second.validate_functions.push_back(function);
}
// TODO : Somewhere need to verify that IBs have correct usage state flagged
if (!skip_call)
- my_data->device_dispatch_table->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+ my_data->device_dispatch_table->CmdBindIndexBuffer(
+ commandBuffer, buffer, offset, indexType);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t count,
- uint32_t stride)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count, uint32_t stride) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
loader_platform_thread_lock_mutex(&globalLock);
- VkBool32 skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdDrawIndirect");
+ VkBool32 skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)buffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdDrawIndirect");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdDrawIndirect(commandBuffer, buffer, offset, count, stride);
+ my_data->device_dispatch_table->CmdDrawIndirect(commandBuffer, buffer,
+ offset, count, stride);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t count,
- uint32_t stride)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count,
+ uint32_t stride) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
loader_platform_thread_lock_mutex(&globalLock);
- VkBool32 skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdDrawIndexedIndirect");
+ VkBool32 skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)buffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdDrawIndexedIndirect");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, count, stride);
+ my_data->device_dispatch_table->CmdDrawIndexedIndirect(
+ commandBuffer, buffer, offset, count, stride);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
loader_platform_thread_lock_mutex(&globalLock);
- VkBool32 skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdDispatchIndirect");
+ VkBool32 skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)buffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdDispatchIndirect");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdDispatchIndirect(commandBuffer, buffer, offset);
+ my_data->device_dispatch_table->CmdDispatchIndirect(commandBuffer,
+ buffer, offset);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy *pRegions)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkBuffer dstBuffer, uint32_t regionCount,
+ const VkBufferCopy *pRegions) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)srcBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdCopyBuffer()"); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(my_data, mem, "vkCmdCopyBuffer()");
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyBuffer");
- skipCall |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyBuffer");
+ skipCall |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyBuffer");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyBuffer");
// Validate that SRC & DST buffers have correct usage flags set
- skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, srcBuffer, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
- skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skipCall |= validate_buffer_usage_flags(
+ my_data, commandBuffer, srcBuffer, VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ true, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
+ skipCall |= validate_buffer_usage_flags(
+ my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ true, "vkCmdCopyBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+ my_data->device_dispatch_table->CmdCopyBuffer(
+ commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize destStride,
- VkQueryResultFlags flags)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
+ VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize destStride,
+ VkQueryResultFlags flags) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyQueryPoolResults");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyQueryPoolResults");
// Validate that DST buffer has correct usage flags set
- skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "vkCmdCopyQueryPoolResults()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, dstBuffer,
+ VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ true, "vkCmdCopyQueryPoolResults()",
+ "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, destStride, flags);
+ my_data->device_dispatch_table->CmdCopyQueryPoolResults(
+ commandBuffer, queryPool, firstQuery, queryCount, dstBuffer,
+ dstOffset, destStride, flags);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy *pRegions)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageCopy *pRegions) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
// Validate that src & dst images have correct usage flags set
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)srcImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdCopyImage()", srcImage); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(my_data, mem, "vkCmdCopyImage()",
+ srcImage);
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyImage");
- skipCall |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyImage");
+ skipCall |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true, dstImage); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true, dstImage);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyImage");
- skipCall |= validate_image_usage_flags(my_data, commandBuffer, srcImage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
- skipCall |= validate_image_usage_flags(my_data, commandBuffer, dstImage, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyImage");
+ skipCall |= validate_image_usage_flags(
+ my_data, commandBuffer, srcImage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
+ "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+ skipCall |= validate_image_usage_flags(
+ my_data, commandBuffer, dstImage, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
+ "vkCmdCopyImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
my_data->device_dispatch_table->CmdCopyImage(
- commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit *pRegions,
- VkFilter filter)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageBlit *pRegions, VkFilter filter) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
// Validate that src & dst images have correct usage flags set
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)srcImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdBlitImage()", srcImage); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(my_data, mem, "vkCmdBlitImage()",
+ srcImage);
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdBlitImage");
- skipCall |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);\
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdBlitImage");
+ skipCall |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true, dstImage); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true, dstImage);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdBlitImage");
- skipCall |= validate_image_usage_flags(my_data, commandBuffer, srcImage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
- skipCall |= validate_image_usage_flags(my_data, commandBuffer, dstImage, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdBlitImage");
+ skipCall |= validate_image_usage_flags(
+ my_data, commandBuffer, srcImage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
+ "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+ skipCall |= validate_image_usage_flags(
+ my_data, commandBuffer, dstImage, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
+ "vkCmdBlitImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
my_data->device_dispatch_table->CmdBlitImage(
- commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions, filter);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy *pRegions)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkImage dstImage, VkImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true, dstImage); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true, dstImage);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyBufferToImage");
- skipCall |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)srcBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyBufferToImage");
+ skipCall |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)srcBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdCopyBufferToImage()"); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(my_data, mem,
+ "vkCmdCopyBufferToImage()");
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyBufferToImage");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyBufferToImage");
// Validate that src buff & dst image have correct usage flags set
- skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, srcBuffer, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, true, "vkCmdCopyBufferToImage()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
- skipCall |= validate_image_usage_flags(my_data, commandBuffer, dstImage, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true, "vkCmdCopyBufferToImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
+ skipCall |= validate_buffer_usage_flags(
+ my_data, commandBuffer, srcBuffer, VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+ true, "vkCmdCopyBufferToImage()", "VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
+ skipCall |= validate_image_usage_flags(
+ my_data, commandBuffer, dstImage, VK_IMAGE_USAGE_TRANSFER_DST_BIT, true,
+ "vkCmdCopyBufferToImage()", "VK_IMAGE_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
my_data->device_dispatch_table->CmdCopyBufferToImage(
- commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+ commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount,
+ pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy *pRegions)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)srcImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdCopyImageToBuffer()", srcImage); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(
+ my_data, mem, "vkCmdCopyImageToBuffer()", srcImage);
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyImageToBuffer");
- skipCall |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyImageToBuffer");
+ skipCall |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdCopyImageToBuffer");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdCopyImageToBuffer");
// Validate that dst buff & src image have correct usage flags set
- skipCall |= validate_image_usage_flags(my_data, commandBuffer, srcImage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true, "vkCmdCopyImageToBuffer()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
- skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "vkCmdCopyImageToBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skipCall |= validate_image_usage_flags(
+ my_data, commandBuffer, srcImage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true,
+ "vkCmdCopyImageToBuffer()", "VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
+ skipCall |= validate_buffer_usage_flags(
+ my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ true, "vkCmdCopyImageToBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
my_data->device_dispatch_table->CmdCopyImageToBuffer(
- commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+ commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount,
+ pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const uint32_t *pData)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize dataSize,
+ const uint32_t *pData) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdUpdateBuffer");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdUpdateBuffer");
// Validate that dst buff has correct usage flags set
- skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "vkCmdUpdateBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skipCall |= validate_buffer_usage_flags(
+ my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ true, "vkCmdUpdateBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+ my_data->device_dispatch_table->CmdUpdateBuffer(
+ commandBuffer, dstBuffer, dstOffset, dataSize, pData);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstBuffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstBuffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdFillBuffer");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdFillBuffer");
// Validate that dst buff has correct usage flags set
- skipCall |= validate_buffer_usage_flags(my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT, true, "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
+ skipCall |= validate_buffer_usage_flags(
+ my_data, commandBuffer, dstBuffer, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+ true, "vkCmdFillBuffer()", "VK_BUFFER_USAGE_TRANSFER_DST_BIT");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+ my_data->device_dispatch_table->CmdFillBuffer(commandBuffer, dstBuffer,
+ dstOffset, size, data);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue *pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange *pRanges)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearColorValue *pColor, uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
// TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)image,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true, image); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true, image);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdClearColorImage");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdClearColorImage");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+ my_data->device_dispatch_table->CmdClearColorImage(
+ commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue *pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange *pRanges)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearDepthStencilValue *pDepthStencil,
+ uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
// TODO : Verify memory is in VK_IMAGE_STATE_CLEAR state
VkDeviceMemory mem;
- VkBool32 skipCall = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)image,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true, image); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true, image);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdClearDepthStencilImage");
+ skipCall |= update_cmd_buf_and_mem_references(
+ my_data, commandBuffer, mem, "vkCmdClearDepthStencilImage");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
my_data->device_dispatch_table->CmdClearDepthStencilImage(
- commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+ commandBuffer, image, imageLayout, pDepthStencil, rangeCount,
+ pRanges);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve *pRegions)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageResolve *pRegions) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
VkBool32 skipCall = VK_FALSE;
auto cb_data = my_data->cbMap.find(commandBuffer);
loader_platform_thread_lock_mutex(&globalLock);
VkDeviceMemory mem;
- skipCall = get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)srcImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall = get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)srcImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, mem, "vkCmdResolveImage()", srcImage); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(my_data, mem, "vkCmdResolveImage()",
+ srcImage);
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdResolveImage");
- skipCall |= get_mem_binding_from_object(my_data, commandBuffer, (uint64_t)dstImage, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdResolveImage");
+ skipCall |= get_mem_binding_from_object(
+ my_data, commandBuffer, (uint64_t)dstImage,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, mem, true, dstImage); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, mem, true, dstImage);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem, "vkCmdResolveImage");
+ skipCall |= update_cmd_buf_and_mem_references(my_data, commandBuffer, mem,
+ "vkCmdResolveImage");
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
my_data->device_dispatch_table->CmdResolveImage(
- commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
+ regionCount, pRegions);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t slot,
- VkFlags flags)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- my_data->device_dispatch_table->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot, VkFlags flags) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ my_data->device_dispatch_table->CmdBeginQuery(commandBuffer, queryPool,
+ slot, flags);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t slot)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
my_data->device_dispatch_table->CmdEndQuery(commandBuffer, queryPool, slot);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
- my_data->device_dispatch_table->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t firstQuery, uint32_t queryCount) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
+ my_data->device_dispatch_table->CmdResetQueryPool(commandBuffer, queryPool,
+ firstQuery, queryCount);
}
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pMsgCallback)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDebugReportCallbackEXT *pMsgCallback) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
- VkResult res = pTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
+ VkResult res = pTable->CreateDebugReportCallbackEXT(
+ instance, pCreateInfo, pAllocator, pMsgCallback);
if (res == VK_SUCCESS) {
- res = layer_create_msg_callback(my_data->report_data, pCreateInfo, pAllocator, pMsgCallback);
+ res = layer_create_msg_callback(my_data->report_data, pCreateInfo,
+ pAllocator, pMsgCallback);
}
return res;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT msgCallback,
- const VkAllocationCallbacks* pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDebugReportCallbackEXT(VkInstance instance,
+ VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
pTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
layer_destroy_msg_callback(my_data->report_data, msgCallback, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objType,
- uint64_t object,
- size_t location,
- int32_t msgCode,
- const char* pLayerPrefix,
- const char* pMsg)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+ VkDebugReportObjectTypeEXT objType, uint64_t object,
+ size_t location, int32_t msgCode,
+ const char *pLayerPrefix, const char *pMsg) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table->DebugReportMessageEXT(
+ instance, flags, objType, object, location, msgCode, pLayerPrefix,
+ pMsg);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkSwapchainKHR *pSwapchain)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSwapchainKHR(VkDevice device,
+ const VkSwapchainCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSwapchainKHR *pSwapchain) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateSwapchainKHR(
+ device, pCreateInfo, pAllocator, pSwapchain);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&globalLock);
@@ -2898,20 +3189,22 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks *pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
if (my_data->swapchainMap.find(swapchain) != my_data->swapchainMap.end()) {
- MT_SWAP_CHAIN_INFO* pInfo = my_data->swapchainMap[swapchain];
+ MT_SWAP_CHAIN_INFO *pInfo = my_data->swapchainMap[swapchain];
if (pInfo->images.size() > 0) {
- for (auto it = pInfo->images.begin(); it != pInfo->images.end(); it++) {
- skipCall = clear_object_binding(my_data, device, (uint64_t)*it, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
+ for (auto it = pInfo->images.begin(); it != pInfo->images.end();
+ it++) {
+ skipCall = clear_object_binding(
+ my_data, device, (uint64_t)*it,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT);
auto image_item = my_data->imageMap.find((uint64_t)*it);
if (image_item != my_data->imageMap.end())
my_data->imageMap.erase(image_item);
@@ -2922,18 +3215,18 @@
}
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- my_data->device_dispatch_table->DestroySwapchainKHR(device, swapchain, pAllocator);
+ my_data->device_dispatch_table->DestroySwapchainKHR(device, swapchain,
+ pAllocator);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t *pCount,
- VkImage *pSwapchainImages)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->GetSwapchainImagesKHR(device, swapchain, pCount, pSwapchainImages);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain,
+ uint32_t *pCount, VkImage *pSwapchainImages) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->GetSwapchainImagesKHR(
+ device, swapchain, pCount, pSwapchainImages);
if (result == VK_SUCCESS && pSwapchainImages != NULL) {
const size_t count = *pCount;
@@ -2941,76 +3234,95 @@
if (pInfo->images.empty()) {
pInfo->images.resize(count);
- memcpy(&pInfo->images[0], pSwapchainImages, sizeof(pInfo->images[0]) * count);
+ memcpy(&pInfo->images[0], pSwapchainImages,
+ sizeof(pInfo->images[0]) * count);
if (pInfo->images.size() > 0) {
- for (std::vector<VkImage>::const_iterator it = pInfo->images.begin();
+ for (std::vector<VkImage>::const_iterator it =
+ pInfo->images.begin();
it != pInfo->images.end(); it++) {
- // Add image object binding, then insert the new Mem Object and then bind it to created image
- add_object_create_info(my_data, (uint64_t)*it, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, &pInfo->createInfo);
+ // Add image object binding, then insert the new Mem Object
+ // and then bind it to created image
+ add_object_create_info(
+ my_data, (uint64_t)*it,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ &pInfo->createInfo);
}
}
} else {
const size_t count = *pCount;
MT_SWAP_CHAIN_INFO *pInfo = my_data->swapchainMap[swapchain];
- const VkBool32 mismatch = (pInfo->images.size() != count ||
- memcmp(&pInfo->images[0], pSwapchainImages, sizeof(pInfo->images[0]) * count));
+ const VkBool32 mismatch =
+ (pInfo->images.size() != count ||
+ memcmp(&pInfo->images[0], pSwapchainImages,
+ sizeof(pInfo->images[0]) * count));
if (mismatch) {
// TODO: Verify against Valid Usage section of extension
- log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, (uint64_t) swapchain, __LINE__, MEMTRACK_NONE, "SWAP_CHAIN",
- "vkGetSwapchainInfoKHR(%" PRIu64 ", VK_SWAP_CHAIN_INFO_TYPE_PERSISTENT_IMAGES_KHR) returned mismatching data", (uint64_t)(swapchain));
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_WARN_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ (uint64_t)swapchain, __LINE__, MEMTRACK_NONE,
+ "SWAP_CHAIN",
+ "vkGetSwapchainInfoKHR(%" PRIu64
+ ", VK_SWAP_CHAIN_INFO_TYPE_PERSISTENT_IMAGES_KHR) "
+ "returned mismatching data",
+ (uint64_t)(swapchain));
}
}
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t *pImageIndex)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain,
+ uint64_t timeout, VkSemaphore semaphore,
+ VkFence fence, uint32_t *pImageIndex) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&globalLock);
if (my_data->semaphoreMap.find(semaphore) != my_data->semaphoreMap.end()) {
- if (my_data->semaphoreMap[semaphore] != MEMTRACK_SEMAPHORE_STATE_UNSET) {
- skipCall = log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, (uint64_t)semaphore,
- __LINE__, MEMTRACK_NONE, "SEMAPHORE",
- "vkAcquireNextImageKHR: Semaphore must not be currently signaled or in a wait state");
+ if (my_data->semaphoreMap[semaphore] !=
+ MEMTRACK_SEMAPHORE_STATE_UNSET) {
+ skipCall = log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, (uint64_t)semaphore,
+ __LINE__, MEMTRACK_NONE, "SEMAPHORE",
+ "vkAcquireNextImageKHR: Semaphore must not be currently "
+ "signaled or in a wait state");
}
my_data->semaphoreMap[semaphore] = MEMTRACK_SEMAPHORE_STATE_SIGNALLED;
}
loader_platform_thread_unlock_mutex(&globalLock);
if (VK_FALSE == skipCall) {
- result = my_data->device_dispatch_table->AcquireNextImageKHR(device,
- swapchain, timeout, semaphore, fence, pImageIndex);
+ result = my_data->device_dispatch_table->AcquireNextImageKHR(
+ device, swapchain, timeout, semaphore, fence, pImageIndex);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
VkBool32 skip_call = false;
VkDeviceMemory mem;
for (uint32_t i = 0; i < pPresentInfo->swapchainCount; ++i) {
- MT_SWAP_CHAIN_INFO *pInfo = my_data->swapchainMap[pPresentInfo->pSwapchains[i]];
+ MT_SWAP_CHAIN_INFO *pInfo =
+ my_data->swapchainMap[pPresentInfo->pSwapchains[i]];
VkImage image = pInfo->images[pPresentInfo->pImageIndices[i]];
- skip_call |= get_mem_binding_from_object(my_data, queue, (uint64_t)(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
- skip_call |= validate_memory_is_valid(my_data, mem, "vkQueuePresentKHR()", image);
+ skip_call |= get_mem_binding_from_object(
+ my_data, queue, (uint64_t)(image),
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &mem);
+ skip_call |= validate_memory_is_valid(my_data, mem,
+ "vkQueuePresentKHR()", image);
}
if (!skip_call) {
- result = my_data->device_dispatch_table->QueuePresentKHR(queue, pPresentInfo);
+ result = my_data->device_dispatch_table->QueuePresentKHR(queue,
+ pPresentInfo);
}
loader_platform_thread_lock_mutex(&globalLock);
@@ -3025,14 +3337,14 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkSemaphore *pSemaphore)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSemaphore *pSemaphore) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateSemaphore(
+ device, pCreateInfo, pAllocator, pSemaphore);
loader_platform_thread_lock_mutex(&globalLock);
if (*pSemaphore != VK_NULL_HANDLE) {
my_data->semaphoreMap[*pSemaphore] = MEMTRACK_SEMAPHORE_STATE_UNSET;
@@ -3041,29 +3353,30 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(
- VkDevice device,
- VkSemaphore semaphore,
- const VkAllocationCallbacks *pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroySemaphore(VkDevice device, VkSemaphore semaphore,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
loader_platform_thread_lock_mutex(&globalLock);
auto item = my_data->semaphoreMap.find(semaphore);
if (item != my_data->semaphoreMap.end()) {
my_data->semaphoreMap.erase(item);
}
loader_platform_thread_unlock_mutex(&globalLock);
- my_data->device_dispatch_table->DestroySemaphore(device, semaphore, pAllocator);
+ my_data->device_dispatch_table->DestroySemaphore(device, semaphore,
+ pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateFramebuffer(VkDevice device,
+ const VkFramebufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkFramebuffer *pFramebuffer) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateFramebuffer(
+ device, pCreateInfo, pAllocator, pFramebuffer);
for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
VkImageView view = pCreateInfo->pAttachments[i];
loader_platform_thread_lock_mutex(&globalLock);
@@ -3073,7 +3386,9 @@
continue;
}
MT_FB_ATTACHMENT_INFO fb_info;
- get_mem_binding_from_object(my_data, device, (uint64_t)(view_data->second.image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &fb_info.mem);
+ get_mem_binding_from_object(
+ my_data, device, (uint64_t)(view_data->second.image),
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, &fb_info.mem);
fb_info.image = view_data->second.image;
my_data->fbMap[*pFramebuffer].attachments.push_back(fb_info);
loader_platform_thread_unlock_mutex(&globalLock);
@@ -3081,12 +3396,11 @@
return result;
}
-VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(
- VkDevice device,
- VkFramebuffer framebuffer,
- const VkAllocationCallbacks* pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+VKAPI_ATTR void VKAPI_CALL
+ vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
auto item = my_data->fbMap.find(framebuffer);
loader_platform_thread_lock_mutex(&globalLock);
@@ -3095,17 +3409,19 @@
}
loader_platform_thread_unlock_mutex(&globalLock);
- my_data->device_dispatch_table->DestroyFramebuffer(device, framebuffer, pAllocator);
+ my_data->device_dispatch_table->DestroyFramebuffer(device, framebuffer,
+ pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkResult result = my_data->device_dispatch_table->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateRenderPass(VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkRenderPass *pRenderPass) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkResult result = my_data->device_dispatch_table->CreateRenderPass(
+ device, pCreateInfo, pAllocator, pRenderPass);
for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
VkAttachmentDescription desc = pCreateInfo->pAttachments[i];
MT_PASS_ATTACHMENT_INFO pass_info;
@@ -3116,29 +3432,39 @@
my_data->passMap[*pRenderPass].attachments.push_back(pass_info);
loader_platform_thread_unlock_mutex(&globalLock);
}
- //TODO: Maybe fill list and then copy instead of locking
+ // TODO: Maybe fill list and then copy instead of locking
loader_platform_thread_lock_mutex(&globalLock);
- std::unordered_map<uint32_t, bool>& attachment_first_read = my_data->passMap[*pRenderPass].attachment_first_read;
- std::unordered_map<uint32_t, VkImageLayout>& attachment_first_layout = my_data->passMap[*pRenderPass].attachment_first_layout;
+ std::unordered_map<uint32_t, bool> &attachment_first_read =
+ my_data->passMap[*pRenderPass].attachment_first_read;
+ std::unordered_map<uint32_t, VkImageLayout> &attachment_first_layout =
+ my_data->passMap[*pRenderPass].attachment_first_layout;
for (uint32_t i = 0; i < pCreateInfo->subpassCount; ++i) {
- const VkSubpassDescription& subpass = pCreateInfo->pSubpasses[i];
+ const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
uint32_t attachment = subpass.pInputAttachments[j].attachment;
- if (attachment_first_read.count(attachment)) continue;
+ if (attachment_first_read.count(attachment))
+ continue;
attachment_first_read.insert(std::make_pair(attachment, true));
- attachment_first_layout.insert(std::make_pair(attachment, subpass.pInputAttachments[j].layout));
+ attachment_first_layout.insert(std::make_pair(
+ attachment, subpass.pInputAttachments[j].layout));
}
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
uint32_t attachment = subpass.pColorAttachments[j].attachment;
- if (attachment_first_read.count(attachment)) continue;
+ if (attachment_first_read.count(attachment))
+ continue;
attachment_first_read.insert(std::make_pair(attachment, false));
- attachment_first_layout.insert(std::make_pair(attachment, subpass.pColorAttachments[j].layout));
+ attachment_first_layout.insert(std::make_pair(
+ attachment, subpass.pColorAttachments[j].layout));
}
- if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
+ if (subpass.pDepthStencilAttachment &&
+ subpass.pDepthStencilAttachment->attachment !=
+ VK_ATTACHMENT_UNUSED) {
uint32_t attachment = subpass.pDepthStencilAttachment->attachment;
- if (attachment_first_read.count(attachment)) continue;
+ if (attachment_first_read.count(attachment))
+ continue;
attachment_first_read.insert(std::make_pair(attachment, false));
- attachment_first_layout.insert(std::make_pair(attachment, subpass.pDepthStencilAttachment->layout));
+ attachment_first_layout.insert(std::make_pair(
+ attachment, subpass.pDepthStencilAttachment->layout));
}
}
loader_platform_thread_unlock_mutex(&globalLock);
@@ -3146,48 +3472,79 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(
- VkCommandBuffer cmdBuffer,
- const VkRenderPassBeginInfo *pRenderPassBegin,
- VkSubpassContents contents)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBeginRenderPass(VkCommandBuffer cmdBuffer,
+ const VkRenderPassBeginInfo *pRenderPassBegin,
+ VkSubpassContents contents) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
VkBool32 skip_call = false;
if (pRenderPassBegin) {
loader_platform_thread_lock_mutex(&globalLock);
auto pass_data = my_data->passMap.find(pRenderPassBegin->renderPass);
if (pass_data != my_data->passMap.end()) {
- MT_PASS_INFO& pass_info = pass_data->second;
+ MT_PASS_INFO &pass_info = pass_data->second;
pass_info.fb = pRenderPassBegin->framebuffer;
auto cb_data = my_data->cbMap.find(cmdBuffer);
for (size_t i = 0; i < pass_info.attachments.size(); ++i) {
- MT_FB_ATTACHMENT_INFO& fb_info = my_data->fbMap[pass_info.fb].attachments[i];
- if (pass_info.attachments[i].load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
+ MT_FB_ATTACHMENT_INFO &fb_info =
+ my_data->fbMap[pass_info.fb].attachments[i];
+ if (pass_info.attachments[i].load_op ==
+ VK_ATTACHMENT_LOAD_OP_CLEAR) {
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, fb_info.mem, true, fb_info.image); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, fb_info.mem, true,
+ fb_info.image);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- VkImageLayout& attachment_layout = pass_info.attachment_first_layout[pass_info.attachments[i].attachment];
- if (attachment_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
- attachment_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
- (uint64_t)(pRenderPassBegin->renderPass), __LINE__, MEMTRACK_INVALID_LAYOUT, "MEM",
- "Cannot clear attachment %d with invalid first layout %d.", pass_info.attachments[i].attachment, attachment_layout);
+ VkImageLayout &attachment_layout =
+ pass_info.attachment_first_layout
+ [pass_info.attachments[i].attachment];
+ if (attachment_layout ==
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL ||
+ attachment_layout ==
+ VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
+ skip_call |= log_msg(
+ my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+ (uint64_t)(pRenderPassBegin->renderPass), __LINE__,
+ MEMTRACK_INVALID_LAYOUT, "MEM",
+ "Cannot clear attachment %d with invalid first "
+ "layout %d.",
+ pass_info.attachments[i].attachment,
+ attachment_layout);
}
- } else if (pass_info.attachments[i].load_op == VK_ATTACHMENT_LOAD_OP_DONT_CARE) {
+ } else if (pass_info.attachments[i].load_op ==
+ VK_ATTACHMENT_LOAD_OP_DONT_CARE) {
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, fb_info.mem, false, fb_info.image); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, fb_info.mem, false,
+ fb_info.image);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- } else if (pass_info.attachments[i].load_op == VK_ATTACHMENT_LOAD_OP_LOAD) {
+ } else if (pass_info.attachments[i].load_op ==
+ VK_ATTACHMENT_LOAD_OP_LOAD) {
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, fb_info.mem, "vkCmdBeginRenderPass()", fb_info.image); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(
+ my_data, fb_info.mem, "vkCmdBeginRenderPass()",
+ fb_info.image);
+ };
cb_data->second.validate_functions.push_back(function);
}
}
- if (pass_info.attachment_first_read[pass_info.attachments[i].attachment]) {
+ if (pass_info.attachment_first_read[pass_info.attachments[i]
+ .attachment]) {
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { return validate_memory_is_valid(my_data, fb_info.mem, "vkCmdBeginRenderPass()", fb_info.image); };
+ std::function<VkBool32()> function = [=]() {
+ return validate_memory_is_valid(
+ my_data, fb_info.mem, "vkCmdBeginRenderPass()",
+ fb_info.image);
+ };
cb_data->second.validate_functions.push_back(function);
}
}
@@ -3199,28 +3556,40 @@
loader_platform_thread_unlock_mutex(&globalLock);
}
if (!skip_call)
- return my_data->device_dispatch_table->CmdBeginRenderPass(cmdBuffer, pRenderPassBegin, contents);
+ return my_data->device_dispatch_table->CmdBeginRenderPass(
+ cmdBuffer, pRenderPassBegin, contents);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(
- VkCommandBuffer cmdBuffer)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdEndRenderPass(VkCommandBuffer cmdBuffer) {
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(cmdBuffer), layer_data_map);
auto cb_data = my_data->cbMap.find(cmdBuffer);
if (cb_data != my_data->cbMap.end()) {
auto pass_data = my_data->passMap.find(cb_data->second.pass);
if (pass_data != my_data->passMap.end()) {
- MT_PASS_INFO& pass_info = pass_data->second;
+ MT_PASS_INFO &pass_info = pass_data->second;
for (size_t i = 0; i < pass_info.attachments.size(); ++i) {
- MT_FB_ATTACHMENT_INFO& fb_info = my_data->fbMap[pass_info.fb].attachments[i];
- if (pass_info.attachments[i].store_op == VK_ATTACHMENT_STORE_OP_STORE) {
+ MT_FB_ATTACHMENT_INFO &fb_info =
+ my_data->fbMap[pass_info.fb].attachments[i];
+ if (pass_info.attachments[i].store_op ==
+ VK_ATTACHMENT_STORE_OP_STORE) {
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, fb_info.mem, true, fb_info.image); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, fb_info.mem, true,
+ fb_info.image);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
- } else if (pass_info.attachments[i].store_op == VK_ATTACHMENT_STORE_OP_DONT_CARE) {
+ } else if (pass_info.attachments[i].store_op ==
+ VK_ATTACHMENT_STORE_OP_DONT_CARE) {
if (cb_data != my_data->cbMap.end()) {
- std::function<VkBool32()> function = [=]() { set_memory_valid(my_data, fb_info.mem, false, fb_info.image); return VK_FALSE; };
+ std::function<VkBool32()> function = [=]() {
+ set_memory_valid(my_data, fb_info.mem, false,
+ fb_info.image);
+ return VK_FALSE;
+ };
cb_data->second.validate_functions.push_back(function);
}
}
@@ -3230,96 +3599,94 @@
my_data->device_dispatch_table->CmdEndRenderPass(cmdBuffer);
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(
- VkDevice dev,
- const char *funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetDeviceProcAddr(VkDevice dev, const char *funcName) {
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkDestroyDevice"))
- return (PFN_vkVoidFunction) vkDestroyDevice;
+ return (PFN_vkVoidFunction)vkDestroyDevice;
if (!strcmp(funcName, "vkQueueSubmit"))
- return (PFN_vkVoidFunction) vkQueueSubmit;
+ return (PFN_vkVoidFunction)vkQueueSubmit;
if (!strcmp(funcName, "vkAllocateMemory"))
- return (PFN_vkVoidFunction) vkAllocateMemory;
+ return (PFN_vkVoidFunction)vkAllocateMemory;
if (!strcmp(funcName, "vkFreeMemory"))
- return (PFN_vkVoidFunction) vkFreeMemory;
+ return (PFN_vkVoidFunction)vkFreeMemory;
if (!strcmp(funcName, "vkMapMemory"))
- return (PFN_vkVoidFunction) vkMapMemory;
+ return (PFN_vkVoidFunction)vkMapMemory;
if (!strcmp(funcName, "vkUnmapMemory"))
- return (PFN_vkVoidFunction) vkUnmapMemory;
+ return (PFN_vkVoidFunction)vkUnmapMemory;
if (!strcmp(funcName, "vkFlushMappedMemoryRanges"))
- return (PFN_vkVoidFunction) vkFlushMappedMemoryRanges;
+ return (PFN_vkVoidFunction)vkFlushMappedMemoryRanges;
if (!strcmp(funcName, "vkInvalidateMappedMemoryRanges"))
- return (PFN_vkVoidFunction) vkInvalidateMappedMemoryRanges;
+ return (PFN_vkVoidFunction)vkInvalidateMappedMemoryRanges;
if (!strcmp(funcName, "vkDestroyFence"))
- return (PFN_vkVoidFunction) vkDestroyFence;
+ return (PFN_vkVoidFunction)vkDestroyFence;
if (!strcmp(funcName, "vkDestroyBuffer"))
- return (PFN_vkVoidFunction) vkDestroyBuffer;
+ return (PFN_vkVoidFunction)vkDestroyBuffer;
if (!strcmp(funcName, "vkDestroyImage"))
- return (PFN_vkVoidFunction) vkDestroyImage;
+ return (PFN_vkVoidFunction)vkDestroyImage;
if (!strcmp(funcName, "vkBindBufferMemory"))
- return (PFN_vkVoidFunction) vkBindBufferMemory;
+ return (PFN_vkVoidFunction)vkBindBufferMemory;
if (!strcmp(funcName, "vkBindImageMemory"))
- return (PFN_vkVoidFunction) vkBindImageMemory;
+ return (PFN_vkVoidFunction)vkBindImageMemory;
if (!strcmp(funcName, "vkGetBufferMemoryRequirements"))
- return (PFN_vkVoidFunction) vkGetBufferMemoryRequirements;
+ return (PFN_vkVoidFunction)vkGetBufferMemoryRequirements;
if (!strcmp(funcName, "vkGetImageMemoryRequirements"))
- return (PFN_vkVoidFunction) vkGetImageMemoryRequirements;
+ return (PFN_vkVoidFunction)vkGetImageMemoryRequirements;
if (!strcmp(funcName, "vkQueueBindSparse"))
- return (PFN_vkVoidFunction) vkQueueBindSparse;
+ return (PFN_vkVoidFunction)vkQueueBindSparse;
if (!strcmp(funcName, "vkCreateFence"))
- return (PFN_vkVoidFunction) vkCreateFence;
+ return (PFN_vkVoidFunction)vkCreateFence;
if (!strcmp(funcName, "vkGetFenceStatus"))
- return (PFN_vkVoidFunction) vkGetFenceStatus;
+ return (PFN_vkVoidFunction)vkGetFenceStatus;
if (!strcmp(funcName, "vkResetFences"))
- return (PFN_vkVoidFunction) vkResetFences;
+ return (PFN_vkVoidFunction)vkResetFences;
if (!strcmp(funcName, "vkWaitForFences"))
- return (PFN_vkVoidFunction) vkWaitForFences;
+ return (PFN_vkVoidFunction)vkWaitForFences;
if (!strcmp(funcName, "vkCreateSemaphore"))
- return (PFN_vkVoidFunction) vkCreateSemaphore;
+ return (PFN_vkVoidFunction)vkCreateSemaphore;
if (!strcmp(funcName, "vkDestroySemaphore"))
- return (PFN_vkVoidFunction) vkDestroySemaphore;
+ return (PFN_vkVoidFunction)vkDestroySemaphore;
if (!strcmp(funcName, "vkQueueWaitIdle"))
- return (PFN_vkVoidFunction) vkQueueWaitIdle;
+ return (PFN_vkVoidFunction)vkQueueWaitIdle;
if (!strcmp(funcName, "vkDeviceWaitIdle"))
- return (PFN_vkVoidFunction) vkDeviceWaitIdle;
+ return (PFN_vkVoidFunction)vkDeviceWaitIdle;
if (!strcmp(funcName, "vkCreateBuffer"))
- return (PFN_vkVoidFunction) vkCreateBuffer;
+ return (PFN_vkVoidFunction)vkCreateBuffer;
if (!strcmp(funcName, "vkCreateImage"))
- return (PFN_vkVoidFunction) vkCreateImage;
+ return (PFN_vkVoidFunction)vkCreateImage;
if (!strcmp(funcName, "vkCreateImageView"))
- return (PFN_vkVoidFunction) vkCreateImageView;
+ return (PFN_vkVoidFunction)vkCreateImageView;
if (!strcmp(funcName, "vkCreateBufferView"))
- return (PFN_vkVoidFunction) vkCreateBufferView;
+ return (PFN_vkVoidFunction)vkCreateBufferView;
if (!strcmp(funcName, "vkAllocateCommandBuffers"))
- return (PFN_vkVoidFunction) vkAllocateCommandBuffers;
+ return (PFN_vkVoidFunction)vkAllocateCommandBuffers;
if (!strcmp(funcName, "vkFreeCommandBuffers"))
- return (PFN_vkVoidFunction) vkFreeCommandBuffers;
+ return (PFN_vkVoidFunction)vkFreeCommandBuffers;
if (!strcmp(funcName, "vkCreateCommandPool"))
- return (PFN_vkVoidFunction) vkCreateCommandPool;
+ return (PFN_vkVoidFunction)vkCreateCommandPool;
if (!strcmp(funcName, "vkDestroyCommandPool"))
- return (PFN_vkVoidFunction) vkDestroyCommandPool;
+ return (PFN_vkVoidFunction)vkDestroyCommandPool;
if (!strcmp(funcName, "vkResetCommandPool"))
- return (PFN_vkVoidFunction) vkResetCommandPool;
+ return (PFN_vkVoidFunction)vkResetCommandPool;
if (!strcmp(funcName, "vkBeginCommandBuffer"))
- return (PFN_vkVoidFunction) vkBeginCommandBuffer;
+ return (PFN_vkVoidFunction)vkBeginCommandBuffer;
if (!strcmp(funcName, "vkEndCommandBuffer"))
- return (PFN_vkVoidFunction) vkEndCommandBuffer;
+ return (PFN_vkVoidFunction)vkEndCommandBuffer;
if (!strcmp(funcName, "vkResetCommandBuffer"))
- return (PFN_vkVoidFunction) vkResetCommandBuffer;
+ return (PFN_vkVoidFunction)vkResetCommandBuffer;
if (!strcmp(funcName, "vkCmdBindPipeline"))
- return (PFN_vkVoidFunction) vkCmdBindPipeline;
+ return (PFN_vkVoidFunction)vkCmdBindPipeline;
if (!strcmp(funcName, "vkCmdBindDescriptorSets"))
- return (PFN_vkVoidFunction) vkCmdBindDescriptorSets;
+ return (PFN_vkVoidFunction)vkCmdBindDescriptorSets;
if (!strcmp(funcName, "vkCmdBindVertexBuffers"))
- return (PFN_vkVoidFunction) vkCmdBindVertexBuffers;
+ return (PFN_vkVoidFunction)vkCmdBindVertexBuffers;
if (!strcmp(funcName, "vkCmdBindIndexBuffer"))
- return (PFN_vkVoidFunction) vkCmdBindIndexBuffer;
+ return (PFN_vkVoidFunction)vkCmdBindIndexBuffer;
if (!strcmp(funcName, "vkCmdDrawIndirect"))
- return (PFN_vkVoidFunction) vkCmdDrawIndirect;
+ return (PFN_vkVoidFunction)vkCmdDrawIndirect;
if (!strcmp(funcName, "vkCmdDrawIndexedIndirect"))
- return (PFN_vkVoidFunction) vkCmdDrawIndexedIndirect;
+ return (PFN_vkVoidFunction)vkCmdDrawIndexedIndirect;
if (!strcmp(funcName, "vkCmdDispatchIndirect"))
return (PFN_vkVoidFunction)vkCmdDispatchIndirect;
if (!strcmp(funcName, "vkCmdCopyBuffer"))
@@ -3327,54 +3694,52 @@
if (!strcmp(funcName, "vkCmdCopyQueryPoolResults"))
return (PFN_vkVoidFunction)vkCmdCopyQueryPoolResults;
if (!strcmp(funcName, "vkCmdCopyImage"))
- return (PFN_vkVoidFunction) vkCmdCopyImage;
+ return (PFN_vkVoidFunction)vkCmdCopyImage;
if (!strcmp(funcName, "vkCmdCopyBufferToImage"))
- return (PFN_vkVoidFunction) vkCmdCopyBufferToImage;
+ return (PFN_vkVoidFunction)vkCmdCopyBufferToImage;
if (!strcmp(funcName, "vkCmdCopyImageToBuffer"))
- return (PFN_vkVoidFunction) vkCmdCopyImageToBuffer;
+ return (PFN_vkVoidFunction)vkCmdCopyImageToBuffer;
if (!strcmp(funcName, "vkCmdUpdateBuffer"))
- return (PFN_vkVoidFunction) vkCmdUpdateBuffer;
+ return (PFN_vkVoidFunction)vkCmdUpdateBuffer;
if (!strcmp(funcName, "vkCmdFillBuffer"))
- return (PFN_vkVoidFunction) vkCmdFillBuffer;
+ return (PFN_vkVoidFunction)vkCmdFillBuffer;
if (!strcmp(funcName, "vkCmdClearColorImage"))
- return (PFN_vkVoidFunction) vkCmdClearColorImage;
+ return (PFN_vkVoidFunction)vkCmdClearColorImage;
if (!strcmp(funcName, "vkCmdClearDepthStencilImage"))
- return (PFN_vkVoidFunction) vkCmdClearDepthStencilImage;
+ return (PFN_vkVoidFunction)vkCmdClearDepthStencilImage;
if (!strcmp(funcName, "vkCmdResolveImage"))
- return (PFN_vkVoidFunction) vkCmdResolveImage;
+ return (PFN_vkVoidFunction)vkCmdResolveImage;
if (!strcmp(funcName, "vkCmdBeginQuery"))
- return (PFN_vkVoidFunction) vkCmdBeginQuery;
+ return (PFN_vkVoidFunction)vkCmdBeginQuery;
if (!strcmp(funcName, "vkCmdEndQuery"))
- return (PFN_vkVoidFunction) vkCmdEndQuery;
+ return (PFN_vkVoidFunction)vkCmdEndQuery;
if (!strcmp(funcName, "vkCmdResetQueryPool"))
- return (PFN_vkVoidFunction) vkCmdResetQueryPool;
+ return (PFN_vkVoidFunction)vkCmdResetQueryPool;
if (!strcmp(funcName, "vkCreateRenderPass"))
- return (PFN_vkVoidFunction) vkCreateRenderPass;
+ return (PFN_vkVoidFunction)vkCreateRenderPass;
if (!strcmp(funcName, "vkCmdBeginRenderPass"))
- return (PFN_vkVoidFunction) vkCmdBeginRenderPass;
+ return (PFN_vkVoidFunction)vkCmdBeginRenderPass;
if (!strcmp(funcName, "vkCmdEndRenderPass"))
- return (PFN_vkVoidFunction) vkCmdEndRenderPass;
+ return (PFN_vkVoidFunction)vkCmdEndRenderPass;
if (!strcmp(funcName, "vkGetDeviceQueue"))
- return (PFN_vkVoidFunction) vkGetDeviceQueue;
+ return (PFN_vkVoidFunction)vkGetDeviceQueue;
if (!strcmp(funcName, "vkCreateFramebuffer"))
- return (PFN_vkVoidFunction) vkCreateFramebuffer;
+ return (PFN_vkVoidFunction)vkCreateFramebuffer;
if (!strcmp(funcName, "vkDestroyFramebuffer"))
- return (PFN_vkVoidFunction) vkDestroyFramebuffer;
-
+ return (PFN_vkVoidFunction)vkDestroyFramebuffer;
if (dev == NULL)
return NULL;
layer_data *my_data;
my_data = get_my_data_ptr(get_dispatch_key(dev), layer_data_map);
- if (my_data->wsi_enabled)
- {
+ if (my_data->wsi_enabled) {
if (!strcmp(funcName, "vkCreateSwapchainKHR"))
- return (PFN_vkVoidFunction) vkCreateSwapchainKHR;
+ return (PFN_vkVoidFunction)vkCreateSwapchainKHR;
if (!strcmp(funcName, "vkDestroySwapchainKHR"))
- return (PFN_vkVoidFunction) vkDestroySwapchainKHR;
+ return (PFN_vkVoidFunction)vkDestroySwapchainKHR;
if (!strcmp(funcName, "vkGetSwapchainImagesKHR"))
- return (PFN_vkVoidFunction) vkGetSwapchainImagesKHR;
+ return (PFN_vkVoidFunction)vkGetSwapchainImagesKHR;
if (!strcmp(funcName, "vkAcquireNextImageKHR"))
return (PFN_vkVoidFunction)vkAcquireNextImageKHR;
if (!strcmp(funcName, "vkQueuePresentKHR"))
@@ -3387,42 +3752,42 @@
return pDisp->GetDeviceProcAddr(dev, funcName);
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(
- VkInstance instance,
- const char *funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
PFN_vkVoidFunction fptr;
if (!strcmp(funcName, "vkGetInstanceProcAddr"))
- return (PFN_vkVoidFunction) vkGetInstanceProcAddr;
+ return (PFN_vkVoidFunction)vkGetInstanceProcAddr;
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkDestroyInstance"))
- return (PFN_vkVoidFunction) vkDestroyInstance;
+ return (PFN_vkVoidFunction)vkDestroyInstance;
if (!strcmp(funcName, "vkCreateInstance"))
- return (PFN_vkVoidFunction) vkCreateInstance;
+ return (PFN_vkVoidFunction)vkCreateInstance;
if (!strcmp(funcName, "vkGetPhysicalDeviceMemoryProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceMemoryProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceMemoryProperties;
if (!strcmp(funcName, "vkCreateDevice"))
- return (PFN_vkVoidFunction) vkCreateDevice;
+ return (PFN_vkVoidFunction)vkCreateDevice;
if (!strcmp(funcName, "vkEnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceLayerProperties;
if (!strcmp(funcName, "vkEnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceExtensionProperties;
if (!strcmp(funcName, "vkEnumerateDeviceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceLayerProperties;
if (!strcmp(funcName, "vkEnumerateDeviceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceExtensionProperties;
- if (instance == NULL) return NULL;
+ if (instance == NULL)
+ return NULL;
layer_data *my_data;
my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
fptr = debug_report_get_instance_proc_addr(my_data->report_data, funcName);
- if (fptr) return fptr;
+ if (fptr)
+ return fptr;
- VkLayerInstanceDispatchTable* pTable = my_data->instance_dispatch_table;
+ VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
if (pTable->GetInstanceProcAddr == NULL)
return NULL;
return pTable->GetInstanceProcAddr(instance, funcName);
diff --git a/layers/mem_tracker.h b/layers/mem_tracker.h
index ae4f0b8..a52e2e3 100644
--- a/layers/mem_tracker.h
+++ b/layers/mem_tracker.h
@@ -41,34 +41,41 @@
#endif
// Mem Tracker ERROR codes
-typedef enum _MEM_TRACK_ERROR
-{
- MEMTRACK_NONE, // Used for INFO & other non-error messages
- MEMTRACK_INVALID_CB, // Cmd Buffer invalid
- MEMTRACK_INVALID_MEM_OBJ, // Invalid Memory Object
- MEMTRACK_INVALID_ALIASING, // Invalid Memory Aliasing
- MEMTRACK_INVALID_LAYOUT, // Invalid Layout
- MEMTRACK_INTERNAL_ERROR, // Bug in Mem Track Layer internal data structures
- MEMTRACK_FREED_MEM_REF, // MEM Obj freed while it still has obj and/or CB refs
- MEMTRACK_MEM_OBJ_CLEAR_EMPTY_BINDINGS, // Clearing bindings on mem obj that doesn't have any bindings
- MEMTRACK_MISSING_MEM_BINDINGS, // Trying to retrieve mem bindings, but none found (may be internal error)
- MEMTRACK_INVALID_OBJECT, // Attempting to reference generic VK Object that is invalid
- MEMTRACK_MEMORY_BINDING_ERROR, // Error during one of many calls that bind memory to object or CB
- MEMTRACK_MEMORY_LEAK, // Failure to call vkFreeMemory on Mem Obj prior to DestroyDevice
- MEMTRACK_INVALID_STATE, // Memory not in the correct state
- MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, // vkResetCommandBuffer() called on a CB that hasn't completed
- MEMTRACK_INVALID_FENCE_STATE, // Invalid Fence State signaled or used
- MEMTRACK_REBIND_OBJECT, // Non-sparse object bindings are immutable
- MEMTRACK_INVALID_USAGE_FLAG, // Usage flags specified at image/buffer create conflict w/ use of object
- MEMTRACK_INVALID_MAP, // Size flag specified at alloc is too small for mapping range
+typedef enum _MEM_TRACK_ERROR {
+ MEMTRACK_NONE, // Used for INFO & other non-error messages
+ MEMTRACK_INVALID_CB, // Cmd Buffer invalid
+ MEMTRACK_INVALID_MEM_OBJ, // Invalid Memory Object
+ MEMTRACK_INVALID_ALIASING, // Invalid Memory Aliasing
+ MEMTRACK_INVALID_LAYOUT, // Invalid Layout
+ MEMTRACK_INTERNAL_ERROR, // Bug in Mem Track Layer internal data structures
+ MEMTRACK_FREED_MEM_REF, // MEM Obj freed while it still has obj and/or CB
+ // refs
+ MEMTRACK_MEM_OBJ_CLEAR_EMPTY_BINDINGS, // Clearing bindings on mem obj that
+ // doesn't have any bindings
+ MEMTRACK_MISSING_MEM_BINDINGS, // Trying to retrieve mem bindings, but none
+ // found (may be internal error)
+ MEMTRACK_INVALID_OBJECT, // Attempting to reference generic VK Object that
+ // is invalid
+ MEMTRACK_MEMORY_BINDING_ERROR, // Error during one of many calls that bind
+ // memory to object or CB
+ MEMTRACK_MEMORY_LEAK, // Failure to call vkFreeMemory on Mem Obj prior to
+ // DestroyDevice
+ MEMTRACK_INVALID_STATE, // Memory not in the correct state
+ MEMTRACK_RESET_CB_WHILE_IN_FLIGHT, // vkResetCommandBuffer() called on a CB
+ // that hasn't completed
+ MEMTRACK_INVALID_FENCE_STATE, // Invalid Fence State signaled or used
+ MEMTRACK_REBIND_OBJECT, // Non-sparse object bindings are immutable
+ MEMTRACK_INVALID_USAGE_FLAG, // Usage flags specified at image/buffer create
+ // conflict w/ use of object
+ MEMTRACK_INVALID_MAP, // Size flag specified at alloc is too small for
+ // mapping range
} MEM_TRACK_ERROR;
// MemTracker Semaphore states
-typedef enum _MtSemaphoreState
-{
- MEMTRACK_SEMAPHORE_STATE_UNSET, // Semaphore is in an undefined state
- MEMTRACK_SEMAPHORE_STATE_SIGNALLED, // Semaphore has is in signalled state
- MEMTRACK_SEMAPHORE_STATE_WAIT, // Semaphore is in wait state
+typedef enum _MtSemaphoreState {
+ MEMTRACK_SEMAPHORE_STATE_UNSET, // Semaphore is in an undefined state
+ MEMTRACK_SEMAPHORE_STATE_SIGNALLED, // Semaphore is in signalled state
+ MEMTRACK_SEMAPHORE_STATE_WAIT, // Semaphore is in wait state
} MtSemaphoreState;
struct MemRange {
@@ -97,40 +104,51 @@
* 2. Mem Objects
* CREATION - Add object,structure to map
* OBJ BIND - Add obj structure to list container for that mem node
- * CMB BIND - If mem-related add CB structure to list container for that mem node
+ * CMB BIND - If mem-related add CB structure to list container for that mem
+ *node
* DESTROY - Flag as errors any remaining refs and remove from map
* 3. Generic Objects
- * MEM BIND - DESTROY any previous binding, Add obj node w/ ref to map, add obj ref to list container for that mem node
- * DESTROY - If mem bound, remove reference list container for that memInfo, remove object ref from map
+ * MEM BIND - DESTROY any previous binding, Add obj node w/ ref to map, add
+ *obj ref to list container for that mem node
+ * DESTROY - If mem bound, remove reference list container for that memInfo,
+ *remove object ref from map
*/
-// TODO : Is there a way to track when Cmd Buffer finishes & remove mem references at that point?
-// TODO : Could potentially store a list of freed mem allocs to flag when they're incorrectly used
+// TODO : Is there a way to track when Cmd Buffer finishes & remove mem
+// references at that point?
+// TODO : Could potentially store a list of freed mem allocs to flag when
+// they're incorrectly used
-// Simple struct to hold handle and type of object so they can be uniquely identified and looked up in appropriate map
+// Simple struct to hold handle and type of object so they can be uniquely
+// identified and looked up in appropriate map
struct MT_OBJ_HANDLE_TYPE {
- uint64_t handle;
+ uint64_t handle;
VkDebugReportObjectTypeEXT type;
};
// Data struct for tracking memory object
struct MT_MEM_OBJ_INFO {
- void* object; // Dispatchable object used to create this memory (device of swapchain)
- uint32_t refCount; // Count of references (obj bindings or CB use)
- bool valid; // Stores if the memory has valid data or not
- VkDeviceMemory mem;
- VkMemoryAllocateInfo allocInfo;
- list<MT_OBJ_HANDLE_TYPE> pObjBindings; // list container of objects bound to this memory
- list<VkCommandBuffer> pCommandBufferBindings; // list container of cmd buffers that reference this mem object
- MemRange memRange;
- void *pData, *pDriverData;
+ void *object; // Dispatchable object used to create this memory (device or
+ // swapchain)
+ uint32_t refCount; // Count of references (obj bindings or CB use)
+ bool valid; // Stores if the memory has valid data or not
+ VkDeviceMemory mem;
+ VkMemoryAllocateInfo allocInfo;
+ list<MT_OBJ_HANDLE_TYPE>
+ pObjBindings; // list container of objects bound to this memory
+ list<VkCommandBuffer> pCommandBufferBindings; // list container of cmd
+ // buffers that reference this
+ // mem object
+ MemRange memRange;
+ void *pData, *pDriverData;
};
// This only applies to Buffers and Images, which can have memory bound to them
struct MT_OBJ_BINDING_INFO {
VkDeviceMemory mem;
- bool valid; //If this is a swapchain image backing memory is not a MT_MEM_OBJ_INFO so store it here.
+ bool valid; // If this is a swapchain image, backing memory is not a
+ // MT_MEM_OBJ_INFO, so store it here.
union create_info {
- VkImageCreateInfo image;
+ VkImageCreateInfo image;
VkBufferCreateInfo buffer;
} create_info;
};
@@ -138,24 +156,28 @@
// Track all command buffers
typedef struct _MT_CB_INFO {
VkCommandBufferAllocateInfo createInfo;
- VkPipeline pipelines[VK_PIPELINE_BIND_POINT_RANGE_SIZE];
- uint32_t attachmentCount;
- VkCommandBuffer commandBuffer;
- uint64_t fenceId;
- VkFence lastSubmittedFence;
- VkQueue lastSubmittedQueue;
- VkRenderPass pass;
- vector<std::function<VkBool32()> > validate_functions;
+ VkPipeline pipelines[VK_PIPELINE_BIND_POINT_RANGE_SIZE];
+ uint32_t attachmentCount;
+ VkCommandBuffer commandBuffer;
+ uint64_t fenceId;
+ VkFence lastSubmittedFence;
+ VkQueue lastSubmittedQueue;
+ VkRenderPass pass;
+ vector<std::function<VkBool32()>> validate_functions;
// Order dependent, stl containers must be at end of struct
- list<VkDeviceMemory> pMemObjList; // List container of Mem objs referenced by this CB
+ list<VkDeviceMemory>
+ pMemObjList; // List container of Mem objs referenced by this CB
// Constructor
- _MT_CB_INFO():createInfo{},pipelines{},attachmentCount(0),fenceId(0),lastSubmittedFence{},lastSubmittedQueue{} {};
+ _MT_CB_INFO()
+ : createInfo{}, pipelines{}, attachmentCount(0), fenceId(0),
+ lastSubmittedFence{}, lastSubmittedQueue{} {};
} MT_CB_INFO;
// Track command pools and their command buffers
typedef struct _MT_CMD_POOL_INFO {
- VkCommandPoolCreateFlags createFlags;
- list<VkCommandBuffer> pCommandBuffers; // list container of cmd buffers allocated from this pool
+ VkCommandPoolCreateFlags createFlags;
+ list<VkCommandBuffer> pCommandBuffers; // list container of cmd buffers
+ // allocated from this pool
} MT_CMD_POOL_INFO;
struct MT_IMAGE_VIEW_INFO {
@@ -172,9 +194,9 @@
};
struct MT_PASS_ATTACHMENT_INFO {
- uint32_t attachment;
- VkAttachmentLoadOp load_op;
- VkAttachmentStoreOp store_op;
+ uint32_t attachment;
+ VkAttachmentLoadOp load_op;
+ VkAttachmentStoreOp store_op;
};
struct MT_PASS_INFO {
@@ -186,24 +208,25 @@
// Associate fenceId with a fence object
struct MT_FENCE_INFO {
- uint64_t fenceId; // Sequence number for fence at last submit
- VkQueue queue; // Queue that this fence is submitted against or NULL
- VkBool32 firstTimeFlag; // Fence was created in signaled state, avoid warnings for first use
+ uint64_t fenceId; // Sequence number for fence at last submit
+ VkQueue queue; // Queue that this fence is submitted against or NULL
+ VkBool32 firstTimeFlag; // Fence was created in signaled state, avoid
+ // warnings for first use
VkFenceCreateInfo createInfo;
};
// Track Queue information
struct MT_QUEUE_INFO {
- uint64_t lastRetiredId;
- uint64_t lastSubmittedId;
- list<VkCommandBuffer> pQueueCommandBuffers;
- list<VkDeviceMemory> pMemRefList;
+ uint64_t lastRetiredId;
+ uint64_t lastSubmittedId;
+ list<VkCommandBuffer> pQueueCommandBuffers;
+ list<VkDeviceMemory> pMemRefList;
};
// Track Swapchain Information
struct MT_SWAP_CHAIN_INFO {
- VkSwapchainCreateInfoKHR createInfo;
- std::vector<VkImage> images;
+ VkSwapchainCreateInfoKHR createInfo;
+ std::vector<VkImage> images;
};
struct MEMORY_RANGE {
diff --git a/layers/object_tracker.h b/layers/object_tracker.h
index 1f2b41a..61263fa 100644
--- a/layers/object_tracker.h
+++ b/layers/object_tracker.h
@@ -37,61 +37,64 @@
#include "vk_layer_table.h"
// Object Tracker ERROR codes
-typedef enum _OBJECT_TRACK_ERROR
-{
- OBJTRACK_NONE, // Used for INFO & other non-error messages
- OBJTRACK_UNKNOWN_OBJECT, // Updating uses of object that's not in global object list
- OBJTRACK_INTERNAL_ERROR, // Bug with data tracking within the layer
- OBJTRACK_DESTROY_OBJECT_FAILED, // Couldn't find object to be destroyed
- OBJTRACK_OBJECT_LEAK, // OBJECT was not correctly freed/destroyed
- OBJTRACK_OBJCOUNT_MAX_EXCEEDED, // Request for Object data in excess of max obj count
- OBJTRACK_INVALID_OBJECT, // Object used that has never been created
- OBJTRACK_DESCRIPTOR_POOL_MISMATCH, // Descriptor Pools specified incorrectly
- OBJTRACK_COMMAND_POOL_MISMATCH, // Command Pools specified incorrectly
+typedef enum _OBJECT_TRACK_ERROR {
+ OBJTRACK_NONE, // Used for INFO & other non-error messages
+ OBJTRACK_UNKNOWN_OBJECT, // Updating uses of object that's not in global
+ // object list
+ OBJTRACK_INTERNAL_ERROR, // Bug with data tracking within the layer
+ OBJTRACK_DESTROY_OBJECT_FAILED, // Couldn't find object to be destroyed
+ OBJTRACK_OBJECT_LEAK, // OBJECT was not correctly freed/destroyed
+ OBJTRACK_OBJCOUNT_MAX_EXCEEDED, // Request for Object data in excess of max
+ // obj count
+ OBJTRACK_INVALID_OBJECT, // Object used that has never been created
+ OBJTRACK_DESCRIPTOR_POOL_MISMATCH, // Descriptor Pools specified incorrectly
+ OBJTRACK_COMMAND_POOL_MISMATCH, // Command Pools specified incorrectly
} OBJECT_TRACK_ERROR;
// Object Status -- used to track state of individual objects
typedef VkFlags ObjectStatusFlags;
-typedef enum _ObjectStatusFlagBits
-{
- OBJSTATUS_NONE = 0x00000000, // No status is set
- OBJSTATUS_FENCE_IS_SUBMITTED = 0x00000001, // Fence has been submitted
- OBJSTATUS_VIEWPORT_BOUND = 0x00000002, // Viewport state object has been bound
- OBJSTATUS_RASTER_BOUND = 0x00000004, // Viewport state object has been bound
- OBJSTATUS_COLOR_BLEND_BOUND = 0x00000008, // Viewport state object has been bound
- OBJSTATUS_DEPTH_STENCIL_BOUND = 0x00000010, // Viewport state object has been bound
- OBJSTATUS_GPU_MEM_MAPPED = 0x00000020, // Memory object is currently mapped
- OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000040, // Command Buffer is of type SECONDARY
+typedef enum _ObjectStatusFlagBits {
+ OBJSTATUS_NONE = 0x00000000, // No status is set
+ OBJSTATUS_FENCE_IS_SUBMITTED = 0x00000001, // Fence has been submitted
+ OBJSTATUS_VIEWPORT_BOUND =
+ 0x00000002, // Viewport state object has been bound
+ OBJSTATUS_RASTER_BOUND = 0x00000004, // Raster state object has been bound
+ OBJSTATUS_COLOR_BLEND_BOUND =
+ 0x00000008, // Color blend state object has been bound
+ OBJSTATUS_DEPTH_STENCIL_BOUND =
+ 0x00000010, // Depth stencil state object has been bound
+ OBJSTATUS_GPU_MEM_MAPPED = 0x00000020, // Memory object is currently mapped
+ OBJSTATUS_COMMAND_BUFFER_SECONDARY =
+ 0x00000040, // Command Buffer is of type SECONDARY
} ObjectStatusFlagBits;
typedef struct _OBJTRACK_NODE {
- uint64_t vkObj; // Object handle
- VkDebugReportObjectTypeEXT objType; // Object type identifier
- ObjectStatusFlags status; // Object state
- uint64_t parentObj; // Parent object
+ uint64_t vkObj; // Object handle
+ VkDebugReportObjectTypeEXT objType; // Object type identifier
+ ObjectStatusFlags status; // Object state
+ uint64_t parentObj; // Parent object
} OBJTRACK_NODE;
// prototype for extension functions
uint64_t objTrackGetObjectCount(VkDevice device);
-uint64_t objTrackGetObjectsOfTypeCount(VkDevice, VkDebugReportObjectTypeEXT type);
+uint64_t objTrackGetObjectsOfTypeCount(VkDevice,
+ VkDebugReportObjectTypeEXT type);
// Func ptr typedefs
typedef uint64_t (*OBJ_TRACK_GET_OBJECT_COUNT)(VkDevice);
-typedef uint64_t (*OBJ_TRACK_GET_OBJECTS_OF_TYPE_COUNT)(VkDevice, VkDebugReportObjectTypeEXT);
+typedef uint64_t (*OBJ_TRACK_GET_OBJECTS_OF_TYPE_COUNT)(
+ VkDevice, VkDebugReportObjectTypeEXT);
struct layer_data {
debug_report_data *report_data;
- //TODO: put instance data here
- VkDebugReportCallbackEXT logging_callback;
+ // TODO: put instance data here
+ VkDebugReportCallbackEXT logging_callback;
bool wsi_enabled;
bool objtrack_extensions_enabled;
- layer_data() :
- report_data(nullptr),
- logging_callback(VK_NULL_HANDLE),
- wsi_enabled(false),
- objtrack_extensions_enabled(false)
- {};
+ layer_data()
+ : report_data(nullptr), logging_callback(VK_NULL_HANDLE),
+ wsi_enabled(false), objtrack_extensions_enabled(false){};
};
struct instExts {
@@ -99,13 +102,13 @@
};
static std::unordered_map<void *, struct instExts> instanceExtMap;
-static std::unordered_map<void*, layer_data *> layer_data_map;
-static device_table_map object_tracker_device_table_map;
-static instance_table_map object_tracker_instance_table_map;
+static std::unordered_map<void *, layer_data *> layer_data_map;
+static device_table_map object_tracker_device_table_map;
+static instance_table_map object_tracker_instance_table_map;
// We need additionally validate image usage using a separate map
// of swapchain-created images
-static unordered_map<uint64_t, OBJTRACK_NODE*> swapchainImageMap;
+static unordered_map<uint64_t, OBJTRACK_NODE *> swapchainImageMap;
static long long unsigned int object_track_index = 0;
static int objLockInitialized = 0;
@@ -114,74 +117,74 @@
// Objects stored in a global map w/ struct containing basic info
// unordered_map<const void*, OBJTRACK_NODE*> objMap;
-#define NUM_OBJECT_TYPES (VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT+1)
+#define NUM_OBJECT_TYPES (VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT + 1)
-static uint64_t numObjs[NUM_OBJECT_TYPES] = {0};
-static uint64_t numTotalObjs = 0;
-static VkQueueFamilyProperties *queueInfo = NULL;
-static uint32_t queueCount = 0;
+static uint64_t numObjs[NUM_OBJECT_TYPES] = {0};
+static uint64_t numTotalObjs = 0;
+static VkQueueFamilyProperties *queueInfo = NULL;
+static uint32_t queueCount = 0;
-template layer_data *get_my_data_ptr<layer_data>(
- void *data_key, std::unordered_map<void *, layer_data *> &data_map);
+template layer_data *
+get_my_data_ptr<layer_data>(void *data_key,
+ std::unordered_map<void *, layer_data *> &data_map);
-static inline const char* string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value)
-{
- switch ((VkDebugReportObjectTypeEXT)input_value)
- {
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
- return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT";
- default:
- return "Unhandled VkObjectType";
+static inline const char *
+string_VkDebugReportObjectTypeEXT(VkDebugReportObjectTypeEXT input_value) {
+ switch ((VkDebugReportObjectTypeEXT)input_value) {
+ case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
+ return "VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT";
+ default:
+ return "Unhandled VkObjectType";
}
}
@@ -189,84 +192,117 @@
// Internal Object Tracker Functions
//
-static void createDeviceRegisterExtensions(const VkDeviceCreateInfo* pCreateInfo, VkDevice device)
-{
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkLayerDispatchTable *pDisp = get_dispatch_table(object_tracker_device_table_map, device);
+static void
+createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo,
+ VkDevice device) {
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkLayerDispatchTable *pDisp =
+ get_dispatch_table(object_tracker_device_table_map, device);
PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
- pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
- pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
- pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
- pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
- pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
+ pDisp->CreateSwapchainKHR =
+ (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
+ pDisp->DestroySwapchainKHR =
+ (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
+ pDisp->GetSwapchainImagesKHR =
+ (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
+ pDisp->AcquireNextImageKHR =
+ (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
+ pDisp->QueuePresentKHR =
+ (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
my_device_data->wsi_enabled = false;
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
my_device_data->wsi_enabled = true;
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], "OBJTRACK_EXTENSIONS") == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ "OBJTRACK_EXTENSIONS") == 0)
my_device_data->objtrack_extensions_enabled = true;
}
}
-static void createInstanceRegisterExtensions(const VkInstanceCreateInfo* pCreateInfo, VkInstance instance)
-{
+static void
+createInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo,
+ VkInstance instance) {
uint32_t i;
- VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(object_tracker_instance_table_map, instance);
+ VkLayerInstanceDispatchTable *pDisp =
+ get_dispatch_table(object_tracker_instance_table_map, instance);
PFN_vkGetInstanceProcAddr gpa = pDisp->GetInstanceProcAddr;
- pDisp->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
- pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
- pDisp->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
- pDisp->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
+ pDisp->GetPhysicalDeviceSurfaceSupportKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
+ pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
+ pDisp->GetPhysicalDeviceSurfaceFormatsKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
+ pDisp->GetPhysicalDeviceSurfacePresentModesKHR =
+ (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
#if VK_USE_PLATFORM_WIN32_KHR
- pDisp->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
- pDisp->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
+ pDisp->CreateWin32SurfaceKHR =
+ (PFN_vkCreateWin32SurfaceKHR)gpa(instance, "vkCreateWin32SurfaceKHR");
+ pDisp->GetPhysicalDeviceWin32PresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
- pDisp->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
- pDisp->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
+ pDisp->CreateXcbSurfaceKHR =
+ (PFN_vkCreateXcbSurfaceKHR)gpa(instance, "vkCreateXcbSurfaceKHR");
+ pDisp->GetPhysicalDeviceXcbPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
- pDisp->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
- pDisp->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
+ pDisp->CreateXlibSurfaceKHR =
+ (PFN_vkCreateXlibSurfaceKHR)gpa(instance, "vkCreateXlibSurfaceKHR");
+ pDisp->GetPhysicalDeviceXlibPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
- pDisp->CreateMirSurfaceKHR = (PFN_vkCreateMirSurfaceKHR) gpa(instance, "vkCreateMirSurfaceKHR");
- pDisp->GetPhysicalDeviceMirPresentationSupportKHR = (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
+ pDisp->CreateMirSurfaceKHR =
+ (PFN_vkCreateMirSurfaceKHR)gpa(instance, "vkCreateMirSurfaceKHR");
+ pDisp->GetPhysicalDeviceMirPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
- pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
+ pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR)gpa(
+ instance, "vkCreateWaylandSurfaceKHR");
+ pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
+ pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR)gpa(
+ instance, "vkCreateAndroidSurfaceKHR");
#endif // VK_USE_PLATFORM_ANDROID_KHR
instanceExtMap[pDisp].wsi_enabled = false;
for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].wsi_enabled = true;
-
}
}
// Indicate device or instance dispatch table type
-typedef enum _DispTableType
-{
+typedef enum _DispTableType {
DISP_TBL_TYPE_INSTANCE,
DISP_TBL_TYPE_DEVICE,
} DispTableType;
-debug_report_data *mdd(const void* object)
-{
+debug_report_data *mdd(const void *object) {
dispatch_key key = get_dispatch_key(object);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
return my_data->report_data;
}
-debug_report_data *mid(VkInstance object)
-{
+debug_report_data *mid(VkInstance object) {
dispatch_key key = get_dispatch_key(object);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
return my_data->report_data;
@@ -274,7 +310,7 @@
// For each Queue's doubly linked-list of mem refs
typedef struct _OT_MEM_INFO {
- VkDeviceMemory mem;
+ VkDeviceMemory mem;
struct _OT_MEM_INFO *pNextMI;
struct _OT_MEM_INFO *pPrevMI;
@@ -282,51 +318,45 @@
// Track Queue information
typedef struct _OT_QUEUE_INFO {
- OT_MEM_INFO *pMemRefList;
- struct _OT_QUEUE_INFO *pNextQI;
- uint32_t queueNodeIndex;
- VkQueue queue;
- uint32_t refCount;
+ OT_MEM_INFO *pMemRefList;
+ struct _OT_QUEUE_INFO *pNextQI;
+ uint32_t queueNodeIndex;
+ VkQueue queue;
+ uint32_t refCount;
} OT_QUEUE_INFO;
// Global list of QueueInfo structures, one per queue
static OT_QUEUE_INFO *g_pQueueInfo = NULL;
// Convert an object type enum to an object type array index
-static uint32_t
-objTypeToIndex(
- uint32_t objType)
-{
+static uint32_t objTypeToIndex(uint32_t objType) {
uint32_t index = objType;
return index;
}
// Add new queue to head of global queue list
-static void
-addQueueInfo(
- uint32_t queueNodeIndex,
- VkQueue queue)
-{
+static void addQueueInfo(uint32_t queueNodeIndex, VkQueue queue) {
OT_QUEUE_INFO *pQueueInfo = new OT_QUEUE_INFO;
if (pQueueInfo != NULL) {
memset(pQueueInfo, 0, sizeof(OT_QUEUE_INFO));
- pQueueInfo->queue = queue;
+ pQueueInfo->queue = queue;
pQueueInfo->queueNodeIndex = queueNodeIndex;
- pQueueInfo->pNextQI = g_pQueueInfo;
- g_pQueueInfo = pQueueInfo;
- }
- else {
- log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_INTERNAL_ERROR, "OBJTRACK",
- "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate memory for Queue Information");
+ pQueueInfo->pNextQI = g_pQueueInfo;
+ g_pQueueInfo = pQueueInfo;
+ } else {
+ log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+ reinterpret_cast<uint64_t>(queue), __LINE__,
+ OBJTRACK_INTERNAL_ERROR, "OBJTRACK",
+ "ERROR: VK_ERROR_OUT_OF_HOST_MEMORY -- could not allocate "
+ "memory for Queue Information");
}
}
// Destroy memRef lists and free all memory
-static void
-destroyQueueMemRefLists(void)
-{
- OT_QUEUE_INFO *pQueueInfo = g_pQueueInfo;
+static void destroyQueueMemRefLists(void) {
+ OT_QUEUE_INFO *pQueueInfo = g_pQueueInfo;
OT_QUEUE_INFO *pDelQueueInfo = NULL;
while (pQueueInfo != NULL) {
OT_MEM_INFO *pMemInfo = pQueueInfo->pMemRefList;
@@ -336,41 +366,46 @@
delete pDelMemInfo;
}
pDelQueueInfo = pQueueInfo;
- pQueueInfo = pQueueInfo->pNextQI;
+ pQueueInfo = pQueueInfo->pNextQI;
delete pDelQueueInfo;
}
g_pQueueInfo = pQueueInfo;
}
-static void
-setGpuQueueInfoState(
- uint32_t count,
- void *pData)
-{
+static void setGpuQueueInfoState(uint32_t count, void *pData) {
queueCount = count;
- queueInfo = (VkQueueFamilyProperties*)realloc((void*)queueInfo, count * sizeof(VkQueueFamilyProperties));
+ queueInfo = (VkQueueFamilyProperties *)realloc(
+ (void *)queueInfo, count * sizeof(VkQueueFamilyProperties));
if (queueInfo != NULL) {
memcpy(queueInfo, pData, count * sizeof(VkQueueFamilyProperties));
}
}
// Check Queue type flags for selected queue operations
-static void
-validateQueueFlags(
- VkQueue queue,
- const char *function)
-{
+static void validateQueueFlags(VkQueue queue, const char *function) {
OT_QUEUE_INFO *pQueueInfo = g_pQueueInfo;
while ((pQueueInfo != NULL) && (pQueueInfo->queue != queue)) {
pQueueInfo = pQueueInfo->pNextQI;
}
if (pQueueInfo != NULL) {
- if ((queueInfo != NULL) && (queueInfo[pQueueInfo->queueNodeIndex].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) == 0) {
- log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, "OBJTRACK",
- "Attempting %s on a non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not set", function);
+ if ((queueInfo != NULL) &&
+ (queueInfo[pQueueInfo->queueNodeIndex].queueFlags &
+ VK_QUEUE_SPARSE_BINDING_BIT) == 0) {
+ log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+ reinterpret_cast<uint64_t>(queue), __LINE__,
+ OBJTRACK_UNKNOWN_OBJECT, "OBJTRACK",
+ "Attempting %s on a non-memory-management capable queue -- "
+ "VK_QUEUE_SPARSE_BINDING_BIT not set",
+ function);
} else {
- log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, reinterpret_cast<uint64_t>(queue), __LINE__, OBJTRACK_UNKNOWN_OBJECT, "OBJTRACK",
- "Attempting %s on a possibly non-memory-management capable queue -- VK_QUEUE_SPARSE_BINDING_BIT not known", function);
+ log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+ reinterpret_cast<uint64_t>(queue), __LINE__,
+ OBJTRACK_UNKNOWN_OBJECT, "OBJTRACK",
+ "Attempting %s on a possibly non-memory-management capable "
+ "queue -- VK_QUEUE_SPARSE_BINDING_BIT not known",
+ function);
}
}
}
@@ -411,21 +446,17 @@
#endif
#include "vk_dispatch_table_helper.h"
-static void
-initObjectTracker(
- layer_data *my_data,
- const VkAllocationCallbacks *pAllocator)
-{
+static void initObjectTracker(layer_data *my_data,
+ const VkAllocationCallbacks *pAllocator) {
uint32_t report_flags = 0;
uint32_t debug_action = 0;
FILE *log_output = NULL;
const char *option_str;
// initialize ObjectTracker options
report_flags = getLayerOptionFlags("ObjectTrackerReportFlags", 0);
- getLayerOptionEnum("ObjectTrackerDebugAction", (uint32_t *) &debug_action);
+ getLayerOptionEnum("ObjectTrackerDebugAction", (uint32_t *)&debug_action);
- if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
- {
+ if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
option_str = getLayerOption("ObjectTrackerLogFilename");
log_output = getLayerLogOutput(option_str, "ObjectTracker");
VkDebugReportCallbackCreateInfoEXT dbgInfo;
@@ -434,11 +465,11 @@
dbgInfo.pfnCallback = log_callback;
dbgInfo.pUserData = log_output;
dbgInfo.flags = report_flags;
- layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator, &my_data->logging_callback);
+ layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator,
+ &my_data->logging_callback);
}
- if (!objLockInitialized)
- {
+ if (!objLockInitialized) {
// TODO/TBD: Need to delete this mutex sometime. How??? One
// suggestion is to call this during vkCreateInstance(), and then we
// can clean it up during vkDestroyInstance(). However, that requires
@@ -453,132 +484,203 @@
// Forward declares of generated routines
//
-static void create_physical_device(VkInstance dispatchable_object, VkPhysicalDevice vkObj, VkDebugReportObjectTypeEXT objType);
-static void create_instance(VkInstance dispatchable_object, VkInstance object, VkDebugReportObjectTypeEXT objType);
-static void create_device(VkDevice dispatchable_object, VkDevice object, VkDebugReportObjectTypeEXT objType);
-static void create_queue(VkDevice dispatchable_object, VkQueue vkObj, VkDebugReportObjectTypeEXT objType);
-static VkBool32 validate_image(VkQueue dispatchable_object, VkImage object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_instance(VkInstance dispatchable_object, VkInstance object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_device(VkDevice dispatchable_object, VkDevice object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_descriptor_pool(VkDevice dispatchable_object, VkDescriptorPool object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_descriptor_set_layout(VkDevice dispatchable_object, VkDescriptorSetLayout object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_command_pool(VkDevice dispatchable_object, VkCommandPool object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_buffer(VkQueue dispatchable_object, VkBuffer object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static void create_pipeline(VkDevice dispatchable_object, VkPipeline vkObj, VkDebugReportObjectTypeEXT objType);
-static VkBool32 validate_pipeline_cache(VkDevice dispatchable_object, VkPipelineCache object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_render_pass(VkDevice dispatchable_object, VkRenderPass object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_shader_module(VkDevice dispatchable_object, VkShaderModule object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_pipeline_layout(VkDevice dispatchable_object, VkPipelineLayout object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static VkBool32 validate_pipeline(VkDevice dispatchable_object, VkPipeline object, VkDebugReportObjectTypeEXT objType, bool null_allowed);
-static void destroy_command_pool(VkDevice dispatchable_object, VkCommandPool object);
-static void destroy_command_buffer(VkCommandBuffer dispatchable_object, VkCommandBuffer object);
-static void destroy_descriptor_pool(VkDevice dispatchable_object, VkDescriptorPool object);
-static void destroy_descriptor_set(VkDevice dispatchable_object, VkDescriptorSet object);
-static void destroy_device_memory(VkDevice dispatchable_object, VkDeviceMemory object);
-static void destroy_swapchain_khr(VkDevice dispatchable_object, VkSwapchainKHR object);
-static VkBool32 set_device_memory_status(VkDevice dispatchable_object, VkDeviceMemory object, VkDebugReportObjectTypeEXT objType, ObjectStatusFlags status_flag);
-static VkBool32 reset_device_memory_status(VkDevice dispatchable_object, VkDeviceMemory object, VkDebugReportObjectTypeEXT objType, ObjectStatusFlags status_flag);
+static void create_physical_device(VkInstance dispatchable_object,
+ VkPhysicalDevice vkObj,
+ VkDebugReportObjectTypeEXT objType);
+static void create_instance(VkInstance dispatchable_object, VkInstance object,
+ VkDebugReportObjectTypeEXT objType);
+static void create_device(VkDevice dispatchable_object, VkDevice object,
+ VkDebugReportObjectTypeEXT objType);
+static void create_queue(VkDevice dispatchable_object, VkQueue vkObj,
+ VkDebugReportObjectTypeEXT objType);
+static VkBool32 validate_image(VkQueue dispatchable_object, VkImage object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_instance(VkInstance dispatchable_object,
+ VkInstance object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_device(VkDevice dispatchable_object, VkDevice object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_descriptor_pool(VkDevice dispatchable_object,
+ VkDescriptorPool object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_descriptor_set_layout(
+ VkDevice dispatchable_object, VkDescriptorSetLayout object,
+ VkDebugReportObjectTypeEXT objType, bool null_allowed);
+static VkBool32 validate_command_pool(VkDevice dispatchable_object,
+ VkCommandPool object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_buffer(VkQueue dispatchable_object, VkBuffer object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static void create_pipeline(VkDevice dispatchable_object, VkPipeline vkObj,
+ VkDebugReportObjectTypeEXT objType);
+static VkBool32 validate_pipeline_cache(VkDevice dispatchable_object,
+ VkPipelineCache object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_render_pass(VkDevice dispatchable_object,
+ VkRenderPass object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_shader_module(VkDevice dispatchable_object,
+ VkShaderModule object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_pipeline_layout(VkDevice dispatchable_object,
+ VkPipelineLayout object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static VkBool32 validate_pipeline(VkDevice dispatchable_object,
+ VkPipeline object,
+ VkDebugReportObjectTypeEXT objType,
+ bool null_allowed);
+static void destroy_command_pool(VkDevice dispatchable_object,
+ VkCommandPool object);
+static void destroy_command_buffer(VkCommandBuffer dispatchable_object,
+ VkCommandBuffer object);
+static void destroy_descriptor_pool(VkDevice dispatchable_object,
+ VkDescriptorPool object);
+static void destroy_descriptor_set(VkDevice dispatchable_object,
+ VkDescriptorSet object);
+static void destroy_device_memory(VkDevice dispatchable_object,
+ VkDeviceMemory object);
+static void destroy_swapchain_khr(VkDevice dispatchable_object,
+ VkSwapchainKHR object);
+static VkBool32 set_device_memory_status(VkDevice dispatchable_object,
+ VkDeviceMemory object,
+ VkDebugReportObjectTypeEXT objType,
+ ObjectStatusFlags status_flag);
+static VkBool32 reset_device_memory_status(VkDevice dispatchable_object,
+ VkDeviceMemory object,
+ VkDebugReportObjectTypeEXT objType,
+ ObjectStatusFlags status_flag);
#if 0
static VkBool32 validate_status(VkDevice dispatchable_object, VkFence object, VkDebugReportObjectTypeEXT objType,
ObjectStatusFlags status_mask, ObjectStatusFlags status_flag, VkFlags msg_flags, OBJECT_TRACK_ERROR error_code,
const char *fail_msg);
#endif
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkPhysicalDeviceMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkImageMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkQueueMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkDescriptorSetMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkBufferMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkFenceMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkSemaphoreMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkCommandPoolMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkCommandBufferMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkSwapchainKHRMap;
-extern unordered_map<uint64_t, OBJTRACK_NODE*> VkSurfaceKHRMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkPhysicalDeviceMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkImageMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkQueueMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkDescriptorSetMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkBufferMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkFenceMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkSemaphoreMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkCommandPoolMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkCommandBufferMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkSwapchainKHRMap;
+extern unordered_map<uint64_t, OBJTRACK_NODE *> VkSurfaceKHRMap;
-static VkBool32 set_status(VkQueue dispatchable_object, VkFence object, VkDebugReportObjectTypeEXT objType, ObjectStatusFlags status_flag)
-{
+static VkBool32 set_status(VkQueue dispatchable_object, VkFence object,
+ VkDebugReportObjectTypeEXT objType,
+ ObjectStatusFlags status_flag) {
VkBool32 skipCall = VK_FALSE;
if (object != VK_NULL_HANDLE) {
if (VkFenceMap.find((uint64_t)(object)) != VkFenceMap.end()) {
- OBJTRACK_NODE* pNode = VkFenceMap[(uint64_t)(object)];
+ OBJTRACK_NODE *pNode = VkFenceMap[(uint64_t)(object)];
pNode->status |= status_flag;
- }
- else {
+ } else {
// If we do not find it print an error
- skipCall |= log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, (uint64_t) object, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "Unable to set status for non-existent object 0x%" PRIxLEAST64 " of %s type",
+ skipCall |= log_msg(
+ mdd(dispatchable_object), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, (uint64_t)object, __LINE__,
+ OBJTRACK_NONE, "OBJTRACK",
+ "Unable to set status for non-existent object 0x%" PRIxLEAST64
+ " of %s type",
(uint64_t)(object), string_VkDebugReportObjectTypeEXT(objType));
}
}
return skipCall;
}
-static void create_physical_device(VkInstance dispatchable_object, VkPhysicalDevice vkObj, VkDebugReportObjectTypeEXT objType)
-{
- log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, objType, reinterpret_cast<uint64_t>(vkObj), __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64 , object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
- reinterpret_cast<uint64_t>(vkObj));
+static void create_physical_device(VkInstance dispatchable_object,
+ VkPhysicalDevice vkObj,
+ VkDebugReportObjectTypeEXT objType) {
+ log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, objType,
+ reinterpret_cast<uint64_t>(vkObj), __LINE__, OBJTRACK_NONE,
+ "OBJTRACK", "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64,
+ object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
+ reinterpret_cast<uint64_t>(vkObj));
- OBJTRACK_NODE* pNewObjNode = new OBJTRACK_NODE;
+ OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
pNewObjNode->objType = objType;
- pNewObjNode->status = OBJSTATUS_NONE;
- pNewObjNode->vkObj = reinterpret_cast<uint64_t>(vkObj);
+ pNewObjNode->status = OBJSTATUS_NONE;
+ pNewObjNode->vkObj = reinterpret_cast<uint64_t>(vkObj);
VkPhysicalDeviceMap[reinterpret_cast<uint64_t>(vkObj)] = pNewObjNode;
uint32_t objIndex = objTypeToIndex(objType);
numObjs[objIndex]++;
numTotalObjs++;
}
-static void create_surface_khr(VkInstance dispatchable_object, VkSurfaceKHR vkObj, VkDebugReportObjectTypeEXT objType)
-{
+static void create_surface_khr(VkInstance dispatchable_object,
+ VkSurfaceKHR vkObj,
+ VkDebugReportObjectTypeEXT objType) {
// TODO: Add tracking of surface objects
- log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, objType, (uint64_t)(vkObj), __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64 , object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
- (uint64_t)(vkObj));
+ log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, objType,
+ (uint64_t)(vkObj), __LINE__, OBJTRACK_NONE, "OBJTRACK",
+ "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64,
+ object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
+ (uint64_t)(vkObj));
- OBJTRACK_NODE* pNewObjNode = new OBJTRACK_NODE;
+ OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
pNewObjNode->objType = objType;
- pNewObjNode->status = OBJSTATUS_NONE;
- pNewObjNode->vkObj = (uint64_t)(vkObj);
+ pNewObjNode->status = OBJSTATUS_NONE;
+ pNewObjNode->vkObj = (uint64_t)(vkObj);
VkSurfaceKHRMap[(uint64_t)vkObj] = pNewObjNode;
uint32_t objIndex = objTypeToIndex(objType);
numObjs[objIndex]++;
numTotalObjs++;
}
-static void destroy_surface_khr(VkInstance dispatchable_object, VkSurfaceKHR object)
-{
+static void destroy_surface_khr(VkInstance dispatchable_object,
+ VkSurfaceKHR object) {
uint64_t object_handle = (uint64_t)(object);
if (VkSurfaceKHRMap.find(object_handle) != VkSurfaceKHRMap.end()) {
- OBJTRACK_NODE* pNode = VkSurfaceKHRMap[(uint64_t)object];
+ OBJTRACK_NODE *pNode = VkSurfaceKHRMap[(uint64_t)object];
uint32_t objIndex = objTypeToIndex(pNode->objType);
assert(numTotalObjs > 0);
numTotalObjs--;
assert(numObjs[objIndex] > 0);
numObjs[objIndex]--;
- log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, pNode->objType, object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
- string_VkDebugReportObjectTypeEXT(pNode->objType), (uint64_t)(object), numTotalObjs, numObjs[objIndex],
- string_VkDebugReportObjectTypeEXT(pNode->objType));
+ log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT,
+ pNode->objType, object_handle, __LINE__, OBJTRACK_NONE,
+ "OBJTRACK",
+ "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64
+ " total objs remain & %" PRIu64 " %s objs).",
+ string_VkDebugReportObjectTypeEXT(pNode->objType),
+ (uint64_t)(object), numTotalObjs, numObjs[objIndex],
+ string_VkDebugReportObjectTypeEXT(pNode->objType));
delete pNode;
VkSurfaceKHRMap.erase(object_handle);
} else {
- log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT ) 0, object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "Unable to remove obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
- object_handle);
+ log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
+ OBJTRACK_NONE, "OBJTRACK",
+ "Unable to remove obj 0x%" PRIxLEAST64
+ ". Was it created? Has it already been destroyed?",
+ object_handle);
}
}
-static void alloc_command_buffer(VkDevice device, VkCommandPool commandPool, VkCommandBuffer vkObj, VkDebugReportObjectTypeEXT objType, VkCommandBufferLevel level)
-{
- log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, objType, reinterpret_cast<uint64_t>(vkObj), __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64 , object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
- reinterpret_cast<uint64_t>(vkObj));
+static void alloc_command_buffer(VkDevice device, VkCommandPool commandPool,
+ VkCommandBuffer vkObj,
+ VkDebugReportObjectTypeEXT objType,
+ VkCommandBufferLevel level) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, objType,
+ reinterpret_cast<uint64_t>(vkObj), __LINE__, OBJTRACK_NONE,
+ "OBJTRACK", "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64,
+ object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
+ reinterpret_cast<uint64_t>(vkObj));
- OBJTRACK_NODE* pNewObjNode = new OBJTRACK_NODE;
- pNewObjNode->objType = objType;
- pNewObjNode->vkObj = reinterpret_cast<uint64_t>(vkObj);
- pNewObjNode->parentObj = (uint64_t) commandPool;
+ OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
+ pNewObjNode->objType = objType;
+ pNewObjNode->vkObj = reinterpret_cast<uint64_t>(vkObj);
+ pNewObjNode->parentObj = (uint64_t)commandPool;
if (level == VK_COMMAND_BUFFER_LEVEL_SECONDARY) {
pNewObjNode->status = OBJSTATUS_COMMAND_BUFFER_SECONDARY;
} else {
@@ -590,127 +692,160 @@
numTotalObjs++;
}
-static void free_command_buffer(VkDevice device, VkCommandPool commandPool, VkCommandBuffer commandBuffer)
-{
+static void free_command_buffer(VkDevice device, VkCommandPool commandPool,
+ VkCommandBuffer commandBuffer) {
uint64_t object_handle = reinterpret_cast<uint64_t>(commandBuffer);
if (VkCommandBufferMap.find(object_handle) != VkCommandBufferMap.end()) {
- OBJTRACK_NODE* pNode = VkCommandBufferMap[(uint64_t)commandBuffer];
+ OBJTRACK_NODE *pNode = VkCommandBufferMap[(uint64_t)commandBuffer];
- if (pNode->parentObj != (uint64_t)(commandPool)) {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->objType, object_handle, __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH, "OBJTRACK",
- "FreeCommandBuffers is attempting to free Command Buffer 0x%" PRIxLEAST64 " belonging to Command Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
- reinterpret_cast<uint64_t>(commandBuffer), pNode->parentObj, (uint64_t)(commandPool));
- } else {
+ if (pNode->parentObj != (uint64_t)(commandPool)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->objType,
+ object_handle, __LINE__, OBJTRACK_COMMAND_POOL_MISMATCH,
+ "OBJTRACK", "FreeCommandBuffers is attempting to free "
+ "Command Buffer 0x%" PRIxLEAST64
+ " belonging to Command Pool 0x%" PRIxLEAST64
+ " from pool 0x%" PRIxLEAST64 ").",
+ reinterpret_cast<uint64_t>(commandBuffer), pNode->parentObj,
+ (uint64_t)(commandPool));
+ } else {
uint32_t objIndex = objTypeToIndex(pNode->objType);
assert(numTotalObjs > 0);
numTotalObjs--;
assert(numObjs[objIndex] > 0);
numObjs[objIndex]--;
- log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, pNode->objType, object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
- string_VkDebugReportObjectTypeEXT(pNode->objType), reinterpret_cast<uint64_t>(commandBuffer), numTotalObjs, numObjs[objIndex],
- string_VkDebugReportObjectTypeEXT(pNode->objType));
+ log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, pNode->objType,
+ object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
+ "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64
+ " total objs remain & %" PRIu64 " %s objs).",
+ string_VkDebugReportObjectTypeEXT(pNode->objType),
+ reinterpret_cast<uint64_t>(commandBuffer), numTotalObjs,
+ numObjs[objIndex],
+ string_VkDebugReportObjectTypeEXT(pNode->objType));
delete pNode;
VkCommandBufferMap.erase(object_handle);
}
} else {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "Unable to remove obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
- object_handle);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
+ OBJTRACK_NONE, "OBJTRACK",
+ "Unable to remove obj 0x%" PRIxLEAST64
+ ". Was it created? Has it already been destroyed?",
+ object_handle);
}
}
-static void alloc_descriptor_set(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorSet vkObj, VkDebugReportObjectTypeEXT objType)
-{
- log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, objType, (uint64_t)(vkObj), __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64 , object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
- (uint64_t)(vkObj));
+static void alloc_descriptor_set(VkDevice device,
+ VkDescriptorPool descriptorPool,
+ VkDescriptorSet vkObj,
+ VkDebugReportObjectTypeEXT objType) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, objType,
+ (uint64_t)(vkObj), __LINE__, OBJTRACK_NONE, "OBJTRACK",
+ "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64,
+ object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
+ (uint64_t)(vkObj));
- OBJTRACK_NODE* pNewObjNode = new OBJTRACK_NODE;
- pNewObjNode->objType = objType;
- pNewObjNode->status = OBJSTATUS_NONE;
- pNewObjNode->vkObj = (uint64_t)(vkObj);
- pNewObjNode->parentObj = (uint64_t) descriptorPool;
+ OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
+ pNewObjNode->objType = objType;
+ pNewObjNode->status = OBJSTATUS_NONE;
+ pNewObjNode->vkObj = (uint64_t)(vkObj);
+ pNewObjNode->parentObj = (uint64_t)descriptorPool;
VkDescriptorSetMap[(uint64_t)vkObj] = pNewObjNode;
uint32_t objIndex = objTypeToIndex(objType);
numObjs[objIndex]++;
numTotalObjs++;
}
-static void free_descriptor_set(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorSet descriptorSet)
-{
+static void free_descriptor_set(VkDevice device,
+ VkDescriptorPool descriptorPool,
+ VkDescriptorSet descriptorSet) {
uint64_t object_handle = (uint64_t)(descriptorSet);
if (VkDescriptorSetMap.find(object_handle) != VkDescriptorSetMap.end()) {
- OBJTRACK_NODE* pNode = VkDescriptorSetMap[(uint64_t)descriptorSet];
+ OBJTRACK_NODE *pNode = VkDescriptorSetMap[(uint64_t)descriptorSet];
if (pNode->parentObj != (uint64_t)(descriptorPool)) {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->objType, object_handle, __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH, "OBJTRACK",
- "FreeDescriptorSets is attempting to free descriptorSet 0x%" PRIxLEAST64 " belonging to Descriptor Pool 0x%" PRIxLEAST64 " from pool 0x%" PRIxLEAST64 ").",
- (uint64_t)(descriptorSet), pNode->parentObj, (uint64_t)(descriptorPool));
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, pNode->objType,
+ object_handle, __LINE__, OBJTRACK_DESCRIPTOR_POOL_MISMATCH,
+ "OBJTRACK", "FreeDescriptorSets is attempting to free "
+ "descriptorSet 0x%" PRIxLEAST64
+ " belonging to Descriptor Pool 0x%" PRIxLEAST64
+ " from pool 0x%" PRIxLEAST64 ").",
+ (uint64_t)(descriptorSet), pNode->parentObj,
+ (uint64_t)(descriptorPool));
} else {
uint32_t objIndex = objTypeToIndex(pNode->objType);
assert(numTotalObjs > 0);
numTotalObjs--;
assert(numObjs[objIndex] > 0);
numObjs[objIndex]--;
- log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, pNode->objType, object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64 " total objs remain & %" PRIu64 " %s objs).",
- string_VkDebugReportObjectTypeEXT(pNode->objType), (uint64_t)(descriptorSet), numTotalObjs, numObjs[objIndex],
- string_VkDebugReportObjectTypeEXT(pNode->objType));
+ log_msg(mdd(device), VK_DEBUG_REPORT_INFO_BIT_EXT, pNode->objType,
+ object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
+ "OBJ_STAT Destroy %s obj 0x%" PRIxLEAST64 " (%" PRIu64
+ " total objs remain & %" PRIu64 " %s objs).",
+ string_VkDebugReportObjectTypeEXT(pNode->objType),
+ (uint64_t)(descriptorSet), numTotalObjs, numObjs[objIndex],
+ string_VkDebugReportObjectTypeEXT(pNode->objType));
delete pNode;
VkDescriptorSetMap.erase(object_handle);
}
} else {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT) 0, object_handle, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "Unable to remove obj 0x%" PRIxLEAST64 ". Was it created? Has it already been destroyed?",
- object_handle);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, object_handle, __LINE__,
+ OBJTRACK_NONE, "OBJTRACK",
+ "Unable to remove obj 0x%" PRIxLEAST64
+ ". Was it created? Has it already been destroyed?",
+ object_handle);
}
}
-static void create_queue(VkDevice dispatchable_object, VkQueue vkObj, VkDebugReportObjectTypeEXT objType)
-{
- log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, objType, reinterpret_cast<uint64_t>(vkObj), __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64 , object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
- reinterpret_cast<uint64_t>(vkObj));
+static void create_queue(VkDevice dispatchable_object, VkQueue vkObj,
+ VkDebugReportObjectTypeEXT objType) {
+ log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, objType,
+ reinterpret_cast<uint64_t>(vkObj), __LINE__, OBJTRACK_NONE,
+ "OBJTRACK", "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64,
+ object_track_index++, string_VkDebugReportObjectTypeEXT(objType),
+ reinterpret_cast<uint64_t>(vkObj));
- OBJTRACK_NODE* pNewObjNode = new OBJTRACK_NODE;
+ OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
pNewObjNode->objType = objType;
- pNewObjNode->status = OBJSTATUS_NONE;
- pNewObjNode->vkObj = reinterpret_cast<uint64_t>(vkObj);
+ pNewObjNode->status = OBJSTATUS_NONE;
+ pNewObjNode->vkObj = reinterpret_cast<uint64_t>(vkObj);
VkQueueMap[reinterpret_cast<uint64_t>(vkObj)] = pNewObjNode;
uint32_t objIndex = objTypeToIndex(objType);
numObjs[objIndex]++;
numTotalObjs++;
}
-static void create_swapchain_image_obj(VkDevice dispatchable_object, VkImage vkObj, VkSwapchainKHR swapchain)
-{
- log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t) vkObj, __LINE__, OBJTRACK_NONE, "OBJTRACK",
- "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64 , object_track_index++, "SwapchainImage",
- (uint64_t)(vkObj));
+static void create_swapchain_image_obj(VkDevice dispatchable_object,
+ VkImage vkObj,
+ VkSwapchainKHR swapchain) {
+ log_msg(mdd(dispatchable_object), VK_DEBUG_REPORT_INFO_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, (uint64_t)vkObj, __LINE__,
+ OBJTRACK_NONE, "OBJTRACK",
+ "OBJ[%llu] : CREATE %s object 0x%" PRIxLEAST64,
+ object_track_index++, "SwapchainImage", (uint64_t)(vkObj));
- OBJTRACK_NODE* pNewObjNode = new OBJTRACK_NODE;
- pNewObjNode->objType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
- pNewObjNode->status = OBJSTATUS_NONE;
- pNewObjNode->vkObj = (uint64_t) vkObj;
- pNewObjNode->parentObj = (uint64_t) swapchain;
+ OBJTRACK_NODE *pNewObjNode = new OBJTRACK_NODE;
+ pNewObjNode->objType = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT;
+ pNewObjNode->status = OBJSTATUS_NONE;
+ pNewObjNode->vkObj = (uint64_t)vkObj;
+ pNewObjNode->parentObj = (uint64_t)swapchain;
swapchainImageMap[(uint64_t)(vkObj)] = pNewObjNode;
}
//
// Non-auto-generated API functions called by generated code
//
-VkResult
-explicit_CreateInstance(
- const VkInstanceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkInstance *pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VkResult explicit_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkInstance *pInstance) {
+ VkLayerInstanceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkCreateInstance fpCreateInstance =
+ (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if (fpCreateInstance == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -723,31 +858,31 @@
return result;
}
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
- initInstanceTable(*pInstance, fpGetInstanceProcAddr, object_tracker_instance_table_map);
- VkLayerInstanceDispatchTable *pInstanceTable = get_dispatch_table(object_tracker_instance_table_map, *pInstance);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
+ initInstanceTable(*pInstance, fpGetInstanceProcAddr,
+ object_tracker_instance_table_map);
+ VkLayerInstanceDispatchTable *pInstanceTable =
+ get_dispatch_table(object_tracker_instance_table_map, *pInstance);
my_data->report_data = debug_report_create_instance(
- pInstanceTable,
- *pInstance,
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ pInstanceTable, *pInstance, pCreateInfo->enabledExtensionCount,
+ pCreateInfo->ppEnabledExtensionNames);
initObjectTracker(my_data, pAllocator);
createInstanceRegisterExtensions(pCreateInfo, *pInstance);
- create_instance(*pInstance, *pInstance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT);
+ create_instance(*pInstance, *pInstance,
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT);
return result;
}
-void
-explicit_GetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice gpu,
- uint32_t* pCount,
- VkQueueFamilyProperties* pProperties)
-{
- get_dispatch_table(object_tracker_instance_table_map, gpu)->GetPhysicalDeviceQueueFamilyProperties(gpu, pCount, pProperties);
+void explicit_GetPhysicalDeviceQueueFamilyProperties(
+ VkPhysicalDevice gpu, uint32_t *pCount,
+ VkQueueFamilyProperties *pProperties) {
+ get_dispatch_table(object_tracker_instance_table_map, gpu)
+ ->GetPhysicalDeviceQueueFamilyProperties(gpu, pCount, pProperties);
loader_platform_thread_lock_mutex(&objLock);
if (pProperties != NULL)
@@ -755,20 +890,21 @@
loader_platform_thread_unlock_mutex(&objLock);
}
-VkResult
-explicit_CreateDevice(
- VkPhysicalDevice gpu,
- const VkDeviceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDevice *pDevice)
-{
+VkResult explicit_CreateDevice(VkPhysicalDevice gpu,
+ const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDevice *pDevice) {
loader_platform_thread_lock_mutex(&objLock);
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if (fpCreateDevice == NULL) {
loader_platform_thread_unlock_mutex(&objLock);
return VK_ERROR_INITIALIZATION_FAILED;
@@ -783,11 +919,15 @@
return result;
}
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
- my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
+ layer_data *my_instance_data =
+ get_my_data_ptr(get_dispatch_key(gpu), layer_data_map);
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
+ my_device_data->report_data = layer_debug_report_create_device(
+ my_instance_data->report_data, *pDevice);
- initDeviceTable(*pDevice, fpGetDeviceProcAddr, object_tracker_device_table_map);
+ initDeviceTable(*pDevice, fpGetDeviceProcAddr,
+ object_tracker_device_table_map);
createDeviceRegisterExtensions(pCreateInfo, *pDevice);
@@ -797,20 +937,27 @@
return result;
}
-VkResult explicit_EnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices)
-{
+VkResult explicit_EnumeratePhysicalDevices(VkInstance instance,
+ uint32_t *pPhysicalDeviceCount,
+ VkPhysicalDevice *pPhysicalDevices) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_instance(instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
+ skipCall |= validate_instance(
+ instance, instance, VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, false);
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = get_dispatch_table(object_tracker_instance_table_map, instance)->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+ VkResult result =
+ get_dispatch_table(object_tracker_instance_table_map, instance)
+ ->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount,
+ pPhysicalDevices);
loader_platform_thread_lock_mutex(&objLock);
if (result == VK_SUCCESS) {
if (pPhysicalDevices) {
for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
- create_physical_device(instance, pPhysicalDevices[i], VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT);
+ create_physical_device(
+ instance, pPhysicalDevices[i],
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT);
}
}
}
@@ -818,18 +965,15 @@
return result;
}
-void
-explicit_GetDeviceQueue(
- VkDevice device,
- uint32_t queueNodeIndex,
- uint32_t queueIndex,
- VkQueue *pQueue)
-{
+void explicit_GetDeviceQueue(VkDevice device, uint32_t queueNodeIndex,
+ uint32_t queueIndex, VkQueue *pQueue) {
loader_platform_thread_lock_mutex(&objLock);
- validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ false);
loader_platform_thread_unlock_mutex(&objLock);
- get_dispatch_table(object_tracker_device_table_map, device)->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue);
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue);
loader_platform_thread_lock_mutex(&objLock);
addQueueInfo(queueNodeIndex, *pQueue);
@@ -837,121 +981,130 @@
loader_platform_thread_unlock_mutex(&objLock);
}
-VkResult
-explicit_MapMemory(
- VkDevice device,
- VkDeviceMemory mem,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkFlags flags,
- void **ppData)
-{
+VkResult explicit_MapMemory(VkDevice device, VkDeviceMemory mem,
+ VkDeviceSize offset, VkDeviceSize size,
+ VkFlags flags, void **ppData) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= set_device_memory_status(device, mem, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, OBJSTATUS_GPU_MEM_MAPPED);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= set_device_memory_status(
+ device, mem, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ OBJSTATUS_GPU_MEM_MAPPED);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall == VK_TRUE)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = get_dispatch_table(object_tracker_device_table_map, device)->MapMemory(device, mem, offset, size, flags, ppData);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->MapMemory(device, mem, offset, size, flags, ppData);
return result;
}
-void
-explicit_UnmapMemory(
- VkDevice device,
- VkDeviceMemory mem)
-{
+void explicit_UnmapMemory(VkDevice device, VkDeviceMemory mem) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= reset_device_memory_status(device, mem, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, OBJSTATUS_GPU_MEM_MAPPED);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= reset_device_memory_status(
+ device, mem, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+ OBJSTATUS_GPU_MEM_MAPPED);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall == VK_TRUE)
return;
- get_dispatch_table(object_tracker_device_table_map, device)->UnmapMemory(device, mem);
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->UnmapMemory(device, mem);
}
-VkResult
-explicit_QueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence)
-{
+VkResult explicit_QueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo *pBindInfo,
+ VkFence fence) {
loader_platform_thread_lock_mutex(&objLock);
validateQueueFlags(queue, "QueueBindSparse");
for (uint32_t i = 0; i < bindInfoCount; i++) {
for (uint32_t j = 0; j < pBindInfo[i].bufferBindCount; j++)
- validate_buffer(queue, pBindInfo[i].pBufferBinds[j].buffer, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
+ validate_buffer(queue, pBindInfo[i].pBufferBinds[j].buffer,
+ VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, false);
for (uint32_t j = 0; j < pBindInfo[i].imageOpaqueBindCount; j++)
- validate_image(queue, pBindInfo[i].pImageOpaqueBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
+ validate_image(queue, pBindInfo[i].pImageOpaqueBinds[j].image,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
for (uint32_t j = 0; j < pBindInfo[i].imageBindCount; j++)
- validate_image(queue, pBindInfo[i].pImageBinds[j].image, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
+ validate_image(queue, pBindInfo[i].pImageBinds[j].image,
+ VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, false);
}
loader_platform_thread_unlock_mutex(&objLock);
- VkResult result = get_dispatch_table(object_tracker_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, queue)
+ ->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
return result;
}
-VkResult
-explicit_AllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo *pAllocateInfo,
- VkCommandBuffer* pCommandBuffers)
-{
+VkResult explicit_AllocateCommandBuffers(
+ VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo,
+ VkCommandBuffer *pCommandBuffers) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
- skipCall |= validate_command_pool(device, pAllocateInfo->commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= validate_command_pool(
+ device, pAllocateInfo->commandPool,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall) {
return VK_ERROR_VALIDATION_FAILED_EXT;
}
- VkResult result = get_dispatch_table(object_tracker_device_table_map, device)->AllocateCommandBuffers(
- device, pAllocateInfo, pCommandBuffers);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
loader_platform_thread_lock_mutex(&objLock);
for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
- alloc_command_buffer(device, pAllocateInfo->commandPool, pCommandBuffers[i], VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, pAllocateInfo->level);
+ alloc_command_buffer(device, pAllocateInfo->commandPool,
+ pCommandBuffers[i],
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ pAllocateInfo->level);
}
loader_platform_thread_unlock_mutex(&objLock);
return result;
}
-VkResult
-explicit_AllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo *pAllocateInfo,
- VkDescriptorSet *pDescriptorSets)
-{
+VkResult explicit_AllocateDescriptorSets(
+ VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
+ VkDescriptorSet *pDescriptorSets) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
- skipCall |= validate_descriptor_pool(device, pAllocateInfo->descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= validate_descriptor_pool(
+ device, pAllocateInfo->descriptorPool,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
- skipCall |= validate_descriptor_set_layout(device, pAllocateInfo->pSetLayouts[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
+ skipCall |= validate_descriptor_set_layout(
+ device, pAllocateInfo->pSetLayouts[i],
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, false);
}
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = get_dispatch_table(object_tracker_device_table_map, device)->AllocateDescriptorSets(
- device, pAllocateInfo, pDescriptorSets);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
if (VK_SUCCESS == result) {
loader_platform_thread_lock_mutex(&objLock);
for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
- alloc_descriptor_set(device, pAllocateInfo->descriptorPool, pDescriptorSets[i], VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
+ alloc_descriptor_set(
+ device, pAllocateInfo->descriptorPool, pDescriptorSets[i],
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT);
}
loader_platform_thread_unlock_mutex(&objLock);
}
@@ -959,113 +1112,112 @@
return result;
}
-void
-explicit_FreeCommandBuffers(
- VkDevice device,
- VkCommandPool commandPool,
- uint32_t commandBufferCount,
- const VkCommandBuffer *pCommandBuffers)
-{
+void explicit_FreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
+ uint32_t commandBufferCount,
+ const VkCommandBuffer *pCommandBuffers) {
loader_platform_thread_lock_mutex(&objLock);
- validate_command_pool(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
- validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ validate_command_pool(device, commandPool,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
+ validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ false);
loader_platform_thread_unlock_mutex(&objLock);
- get_dispatch_table(object_tracker_device_table_map, device)->FreeCommandBuffers(device,
- commandPool, commandBufferCount, pCommandBuffers);
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->FreeCommandBuffers(device, commandPool, commandBufferCount,
+ pCommandBuffers);
loader_platform_thread_lock_mutex(&objLock);
- for (uint32_t i = 0; i < commandBufferCount; i++)
- {
+ for (uint32_t i = 0; i < commandBufferCount; i++) {
free_command_buffer(device, commandPool, *pCommandBuffers);
pCommandBuffers++;
}
loader_platform_thread_unlock_mutex(&objLock);
}
-void
-explicit_DestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks *pAllocator)
-{
+void explicit_DestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain,
+ const VkAllocationCallbacks *pAllocator) {
loader_platform_thread_lock_mutex(&objLock);
- // A swapchain's images are implicitly deleted when the swapchain is deleted.
+ // A swapchain's images are implicitly deleted when the swapchain is
+ // deleted.
// Remove this swapchain's images from our map of such images.
- unordered_map<uint64_t, OBJTRACK_NODE*>::iterator itr = swapchainImageMap.begin();
+ unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
+ swapchainImageMap.begin();
while (itr != swapchainImageMap.end()) {
- OBJTRACK_NODE* pNode = (*itr).second;
+ OBJTRACK_NODE *pNode = (*itr).second;
if (pNode->parentObj == (uint64_t)(swapchain)) {
- swapchainImageMap.erase(itr++);
+ swapchainImageMap.erase(itr++);
} else {
- ++itr;
+ ++itr;
}
}
destroy_swapchain_khr(device, swapchain);
loader_platform_thread_unlock_mutex(&objLock);
- get_dispatch_table(object_tracker_device_table_map, device)->DestroySwapchainKHR(device, swapchain, pAllocator);
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->DestroySwapchainKHR(device, swapchain, pAllocator);
}
-void
-explicit_FreeMemory(
- VkDevice device,
- VkDeviceMemory mem,
- const VkAllocationCallbacks* pAllocator)
-{
+void explicit_FreeMemory(VkDevice device, VkDeviceMemory mem,
+ const VkAllocationCallbacks *pAllocator) {
loader_platform_thread_lock_mutex(&objLock);
- validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ false);
loader_platform_thread_unlock_mutex(&objLock);
- get_dispatch_table(object_tracker_device_table_map, device)->FreeMemory(device, mem, pAllocator);
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->FreeMemory(device, mem, pAllocator);
loader_platform_thread_lock_mutex(&objLock);
destroy_device_memory(device, mem);
loader_platform_thread_unlock_mutex(&objLock);
}
-VkResult
-explicit_FreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t count,
- const VkDescriptorSet *pDescriptorSets)
-{
+VkResult explicit_FreeDescriptorSets(VkDevice device,
+ VkDescriptorPool descriptorPool,
+ uint32_t count,
+ const VkDescriptorSet *pDescriptorSets) {
loader_platform_thread_lock_mutex(&objLock);
- validate_descriptor_pool(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
- validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ validate_descriptor_pool(device, descriptorPool,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ false);
+ validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ false);
loader_platform_thread_unlock_mutex(&objLock);
- VkResult result = get_dispatch_table(object_tracker_device_table_map, device)->FreeDescriptorSets(device, descriptorPool, count, pDescriptorSets);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->FreeDescriptorSets(device, descriptorPool, count,
+ pDescriptorSets);
loader_platform_thread_lock_mutex(&objLock);
- for (uint32_t i=0; i<count; i++)
- {
+ for (uint32_t i = 0; i < count; i++) {
free_descriptor_set(device, descriptorPool, *pDescriptorSets++);
}
loader_platform_thread_unlock_mutex(&objLock);
return result;
}
-void
-explicit_DestroyDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- const VkAllocationCallbacks *pAllocator)
-{
+void explicit_DestroyDescriptorPool(VkDevice device,
+ VkDescriptorPool descriptorPool,
+ const VkAllocationCallbacks *pAllocator) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
- skipCall |= validate_descriptor_pool(device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, false);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= validate_descriptor_pool(
+ device, descriptorPool, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+ false);
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall) {
return;
}
- // A DescriptorPool's descriptor sets are implicitly deleted when the pool is deleted.
+ // A DescriptorPool's descriptor sets are implicitly deleted when the pool
+ // is deleted.
// Remove this pool's descriptor sets from our descriptorSet map.
loader_platform_thread_lock_mutex(&objLock);
- unordered_map<uint64_t, OBJTRACK_NODE*>::iterator itr = VkDescriptorSetMap.begin();
+ unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
+ VkDescriptorSetMap.begin();
while (itr != VkDescriptorSetMap.end()) {
- OBJTRACK_NODE* pNode = (*itr).second;
+ OBJTRACK_NODE *pNode = (*itr).second;
auto del_itr = itr++;
if (pNode->parentObj == (uint64_t)(descriptorPool)) {
destroy_descriptor_set(device, (VkDescriptorSet)((*del_itr).first));
@@ -1073,56 +1225,61 @@
}
destroy_descriptor_pool(device, descriptorPool);
loader_platform_thread_unlock_mutex(&objLock);
- get_dispatch_table(object_tracker_device_table_map, device)->DestroyDescriptorPool(device, descriptorPool, pAllocator);
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->DestroyDescriptorPool(device, descriptorPool, pAllocator);
}
-void
-explicit_DestroyCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- const VkAllocationCallbacks *pAllocator)
-{
+void explicit_DestroyCommandPool(VkDevice device, VkCommandPool commandPool,
+ const VkAllocationCallbacks *pAllocator) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
- skipCall |= validate_command_pool(device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, false);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= validate_command_pool(
+ device, commandPool, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+ false);
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall) {
return;
}
loader_platform_thread_lock_mutex(&objLock);
- // A CommandPool's command buffers are implicitly deleted when the pool is deleted.
+ // A CommandPool's command buffers are implicitly deleted when the pool is
+ // deleted.
// Remove this pool's cmdBuffers from our cmd buffer map.
- unordered_map<uint64_t, OBJTRACK_NODE*>::iterator itr = VkCommandBufferMap.begin();
- unordered_map<uint64_t, OBJTRACK_NODE*>::iterator del_itr;
+ unordered_map<uint64_t, OBJTRACK_NODE *>::iterator itr =
+ VkCommandBufferMap.begin();
+ unordered_map<uint64_t, OBJTRACK_NODE *>::iterator del_itr;
while (itr != VkCommandBufferMap.end()) {
- OBJTRACK_NODE* pNode = (*itr).second;
+ OBJTRACK_NODE *pNode = (*itr).second;
del_itr = itr++;
if (pNode->parentObj == (uint64_t)(commandPool)) {
- destroy_command_buffer(reinterpret_cast<VkCommandBuffer>((*del_itr).first),
- reinterpret_cast<VkCommandBuffer>((*del_itr).first));
+ destroy_command_buffer(
+ reinterpret_cast<VkCommandBuffer>((*del_itr).first),
+ reinterpret_cast<VkCommandBuffer>((*del_itr).first));
}
}
destroy_command_pool(device, commandPool);
loader_platform_thread_unlock_mutex(&objLock);
- get_dispatch_table(object_tracker_device_table_map, device)->DestroyCommandPool(device, commandPool, pAllocator);
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->DestroyCommandPool(device, commandPool, pAllocator);
}
-VkResult
-explicit_GetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t *pCount,
- VkImage *pSwapchainImages)
-{
+VkResult explicit_GetSwapchainImagesKHR(VkDevice device,
+ VkSwapchainKHR swapchain,
+ uint32_t *pCount,
+ VkImage *pSwapchainImages) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = get_dispatch_table(object_tracker_device_table_map, device)->GetSwapchainImagesKHR(device, swapchain, pCount, pSwapchainImages);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->GetSwapchainImagesKHR(device, swapchain, pCount,
+ pSwapchainImages);
if (pSwapchainImages != NULL) {
loader_platform_thread_lock_mutex(&objLock);
@@ -1134,93 +1291,115 @@
return result;
}
-// TODO: Add special case to codegen to cover validating all the pipelines instead of just the first
-VkResult
-explicit_CreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
+// TODO: Add special case to codegen to cover validating all the pipelines
+// instead of just the first
+VkResult explicit_CreateGraphicsPipelines(
+ VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
const VkGraphicsPipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator,
- VkPipeline *pPipelines)
-{
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
if (pCreateInfos) {
- for (uint32_t idx0=0; idx0<createInfoCount; ++idx0) {
+ for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
if (pCreateInfos[idx0].basePipelineHandle) {
- skipCall |= validate_pipeline(device, pCreateInfos[idx0].basePipelineHandle, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
+ skipCall |= validate_pipeline(
+ device, pCreateInfos[idx0].basePipelineHandle,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
}
if (pCreateInfos[idx0].layout) {
- skipCall |= validate_pipeline_layout(device, pCreateInfos[idx0].layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
+ skipCall |= validate_pipeline_layout(
+ device, pCreateInfos[idx0].layout,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
}
if (pCreateInfos[idx0].pStages) {
- for (uint32_t idx1=0; idx1<pCreateInfos[idx0].stageCount; ++idx1) {
+ for (uint32_t idx1 = 0; idx1 < pCreateInfos[idx0].stageCount;
+ ++idx1) {
if (pCreateInfos[idx0].pStages[idx1].module) {
- skipCall |= validate_shader_module(device, pCreateInfos[idx0].pStages[idx1].module, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
+ skipCall |= validate_shader_module(
+ device, pCreateInfos[idx0].pStages[idx1].module,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+ false);
}
}
}
if (pCreateInfos[idx0].renderPass) {
- skipCall |= validate_render_pass(device, pCreateInfos[idx0].renderPass, VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
+ skipCall |= validate_render_pass(
+ device, pCreateInfos[idx0].renderPass,
+ VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, false);
}
}
}
if (pipelineCache) {
- skipCall |= validate_pipeline_cache(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
+ skipCall |= validate_pipeline_cache(
+ device, pipelineCache,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
}
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = get_dispatch_table(object_tracker_device_table_map, device)->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
+ pCreateInfos, pAllocator, pPipelines);
loader_platform_thread_lock_mutex(&objLock);
if (result == VK_SUCCESS) {
for (uint32_t idx2 = 0; idx2 < createInfoCount; ++idx2) {
- create_pipeline(device, pPipelines[idx2], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
+ create_pipeline(device, pPipelines[idx2],
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
}
}
loader_platform_thread_unlock_mutex(&objLock);
return result;
}
-// TODO: Add special case to codegen to cover validating all the pipelines instead of just the first
-VkResult
-explicit_CreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t createInfoCount,
+// TODO: Add special case to codegen to cover validating all the pipelines
+// instead of just the first
+VkResult explicit_CreateComputePipelines(
+ VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
const VkComputePipelineCreateInfo *pCreateInfos,
- const VkAllocationCallbacks *pAllocator,
- VkPipeline *pPipelines)
-{
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
VkBool32 skipCall = VK_FALSE;
loader_platform_thread_lock_mutex(&objLock);
- skipCall |= validate_device(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
+ skipCall |= validate_device(device, device,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, false);
if (pCreateInfos) {
- for (uint32_t idx0=0; idx0<createInfoCount; ++idx0) {
+ for (uint32_t idx0 = 0; idx0 < createInfoCount; ++idx0) {
if (pCreateInfos[idx0].basePipelineHandle) {
- skipCall |= validate_pipeline(device, pCreateInfos[idx0].basePipelineHandle, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
+ skipCall |= validate_pipeline(
+ device, pCreateInfos[idx0].basePipelineHandle,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, true);
}
if (pCreateInfos[idx0].layout) {
- skipCall |= validate_pipeline_layout(device, pCreateInfos[idx0].layout, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
+ skipCall |= validate_pipeline_layout(
+ device, pCreateInfos[idx0].layout,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, false);
}
if (pCreateInfos[idx0].stage.module) {
- skipCall |= validate_shader_module(device, pCreateInfos[idx0].stage.module, VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
+ skipCall |= validate_shader_module(
+ device, pCreateInfos[idx0].stage.module,
+ VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, false);
}
}
}
if (pipelineCache) {
- skipCall |= validate_pipeline_cache(device, pipelineCache, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
+ skipCall |= validate_pipeline_cache(
+ device, pipelineCache,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, false);
}
loader_platform_thread_unlock_mutex(&objLock);
if (skipCall)
return VK_ERROR_VALIDATION_FAILED_EXT;
- VkResult result = get_dispatch_table(object_tracker_device_table_map, device)->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+ VkResult result =
+ get_dispatch_table(object_tracker_device_table_map, device)
+ ->CreateComputePipelines(device, pipelineCache, createInfoCount,
+ pCreateInfos, pAllocator, pPipelines);
loader_platform_thread_lock_mutex(&objLock);
if (result == VK_SUCCESS) {
for (uint32_t idx1 = 0; idx1 < createInfoCount; ++idx1) {
- create_pipeline(device, pPipelines[idx1], VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
+ create_pipeline(device, pPipelines[idx1],
+ VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT);
}
}
loader_platform_thread_unlock_mutex(&objLock);
diff --git a/layers/param_checker.cpp b/layers/param_checker.cpp
index 905cc13..1cdeec2 100644
--- a/layers/param_checker.cpp
+++ b/layers/param_checker.cpp
@@ -56,27 +56,25 @@
debug_report_data *report_data;
std::vector<VkDebugReportCallbackEXT> logging_callback;
- //TODO: Split instance/device structs
- //Device Data
- //Map for queue family index to queue count
+ // TODO: Split instance/device structs
+ // Device Data
+ // Map for queue family index to queue count
std::unordered_map<uint32_t, uint32_t> queueFamilyIndexMap;
- layer_data() :
- report_data(nullptr)
- {};
+ layer_data() : report_data(nullptr){};
};
-static std::unordered_map<void*, layer_data*> layer_data_map;
+static std::unordered_map<void *, layer_data *> layer_data_map;
static device_table_map pc_device_table_map;
static instance_table_map pc_instance_table_map;
// "my instance data"
-debug_report_data *mid(VkInstance object)
-{
+debug_report_data *mid(VkInstance object) {
dispatch_key key = get_dispatch_key(object);
layer_data *data = get_my_data_ptr(key, layer_data_map);
#if DISPATCH_MAP_DEBUG
- fprintf(stderr, "MID: map: %p, object: %p, key: %p, data: %p\n", &layer_data_map, object, key, data);
+ fprintf(stderr, "MID: map: %p, object: %p, key: %p, data: %p\n",
+ &layer_data_map, object, key, data);
#endif
assert(data != NULL);
@@ -84,28 +82,27 @@
}
// "my device data"
-debug_report_data *mdd(void* object)
-{
+debug_report_data *mdd(void *object) {
dispatch_key key = get_dispatch_key(object);
layer_data *data = get_my_data_ptr(key, layer_data_map);
#if DISPATCH_MAP_DEBUG
- fprintf(stderr, "MDD: map: %p, object: %p, key: %p, data: %p\n", &layer_data_map, object, key, data);
+ fprintf(stderr, "MDD: map: %p, object: %p, key: %p, data: %p\n",
+ &layer_data_map, object, key, data);
#endif
assert(data != NULL);
return data->report_data;
}
-static void InitParamChecker(layer_data *data, const VkAllocationCallbacks *pAllocator)
-{
+static void InitParamChecker(layer_data *data,
+ const VkAllocationCallbacks *pAllocator) {
VkDebugReportCallbackEXT callback;
uint32_t report_flags = getLayerOptionFlags("ParamCheckerReportFlags", 0);
uint32_t debug_action = 0;
- getLayerOptionEnum("ParamCheckerDebugAction", (uint32_t *) &debug_action);
- if(debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
- {
+ getLayerOptionEnum("ParamCheckerDebugAction", (uint32_t *)&debug_action);
+ if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
FILE *log_output = NULL;
- const char* option_str = getLayerOption("ParamCheckerLogFilename");
+ const char *option_str = getLayerOption("ParamCheckerLogFilename");
log_output = getLayerLogOutput(option_str, "ParamChecker");
VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
memset(&dbgCreateInfo, 0, sizeof(dbgCreateInfo));
@@ -114,7 +111,8 @@
dbgCreateInfo.pfnCallback = log_callback;
dbgCreateInfo.pUserData = log_output;
- layer_create_msg_callback(data->report_data, &dbgCreateInfo, pAllocator, &callback);
+ layer_create_msg_callback(data->report_data, &dbgCreateInfo, pAllocator,
+ &callback);
data->logging_callback.push_back(callback);
}
@@ -126,209 +124,174 @@
dbgCreateInfo.pfnCallback = win32_debug_output_msg;
dbgCreateInfo.pUserData = NULL;
- layer_create_msg_callback(data->report_data, &dbgCreateInfo, pAllocator, &callback);
+ layer_create_msg_callback(data->report_data, &dbgCreateInfo, pAllocator,
+ &callback);
data->logging_callback.push_back(callback);
}
}
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
+ VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
- VkDebugReportCallbackEXT* pMsgCallback)
-{
- VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance);
- VkResult result = pTable->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
+ VkDebugReportCallbackEXT *pMsgCallback) {
+ VkLayerInstanceDispatchTable *pTable =
+ get_dispatch_table(pc_instance_table_map, instance);
+ VkResult result = pTable->CreateDebugReportCallbackEXT(
+ instance, pCreateInfo, pAllocator, pMsgCallback);
- if (result == VK_SUCCESS)
- {
- layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- result = layer_create_msg_callback(data->report_data, pCreateInfo, pAllocator, pMsgCallback);
+ if (result == VK_SUCCESS) {
+ layer_data *data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ result = layer_create_msg_callback(data->report_data, pCreateInfo,
+ pAllocator, pMsgCallback);
}
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
- VkInstance instance,
- VkDebugReportCallbackEXT msgCallback,
- const VkAllocationCallbacks *pAllocator)
-{
- VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDebugReportCallbackEXT(VkInstance instance,
+ VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks *pAllocator) {
+ VkLayerInstanceDispatchTable *pTable =
+ get_dispatch_table(pc_instance_table_map, instance);
pTable->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
- layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ layer_data *data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
layer_destroy_msg_callback(data->report_data, msgCallback, pAllocator);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objType,
- uint64_t object,
- size_t location,
- int32_t msgCode,
- const char* pLayerPrefix,
- const char* pMsg)
-{
- VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance);
- pTable->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags,
+ VkDebugReportObjectTypeEXT objType, uint64_t object,
+ size_t location, int32_t msgCode,
+ const char *pLayerPrefix, const char *pMsg) {
+ VkLayerInstanceDispatchTable *pTable =
+ get_dispatch_table(pc_instance_table_map, instance);
+ pTable->DebugReportMessageEXT(instance, flags, objType, object, location,
+ msgCode, pLayerPrefix, pMsg);
}
static const VkExtensionProperties instance_extensions[] = {
- {
- VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
- VK_EXT_DEBUG_REPORT_SPEC_VERSION
- }
-};
+ {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
- const char *pLayerName,
- uint32_t *pCount,
- VkExtensionProperties* pProperties)
-{
- return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
+ return util_GetExtensionProperties(1, instance_extensions, pCount,
+ pProperties);
}
-static const VkLayerProperties pc_global_layers[] = {
- {
- "VK_LAYER_LUNARG_param_checker",
- VK_API_VERSION,
- VK_MAKE_VERSION(0, 1, 0),
- "Validation layer: param_checker",
- }
-};
+static const VkLayerProperties pc_global_layers[] = {{
+ "VK_LAYER_LUNARG_param_checker", VK_API_VERSION, VK_MAKE_VERSION(0, 1, 0),
+ "Validation layer: param_checker",
+}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
- uint32_t *pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+ VkLayerProperties *pProperties) {
return util_GetLayerProperties(ARRAY_SIZE(pc_global_layers),
- pc_global_layers,
- pCount, pProperties);
+ pc_global_layers, pCount, pProperties);
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
- VkPhysicalDevice physicalDevice,
- const char* pLayerName,
- uint32_t* pCount,
- VkExtensionProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
+ const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
/* ParamChecker does not have any physical device extensions */
if (pLayerName == NULL) {
- return get_dispatch_table(pc_instance_table_map, physicalDevice)->EnumerateDeviceExtensionProperties(
- physicalDevice,
- NULL,
- pCount,
- pProperties);
+ return get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->EnumerateDeviceExtensionProperties(physicalDevice, NULL, pCount,
+ pProperties);
} else {
return util_GetExtensionProperties(0, NULL, pCount, pProperties);
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
+ uint32_t *pCount,
+ VkLayerProperties *pProperties) {
/* ParamChecker's physical device layers are the same as global */
- return util_GetLayerProperties(ARRAY_SIZE(pc_global_layers), pc_global_layers,
- pCount, pProperties);
+ return util_GetLayerProperties(ARRAY_SIZE(pc_global_layers),
+ pc_global_layers, pCount, pProperties);
}
// Version: 0.138.2
-static
-std::string EnumeratorString(VkResult const& enumerator)
-{
- switch(enumerator)
- {
- case VK_RESULT_MAX_ENUM:
- {
- return "VK_RESULT_MAX_ENUM";
- break;
- }
- case VK_ERROR_LAYER_NOT_PRESENT:
- {
- return "VK_ERROR_LAYER_NOT_PRESENT";
- break;
- }
- case VK_ERROR_INCOMPATIBLE_DRIVER:
- {
- return "VK_ERROR_INCOMPATIBLE_DRIVER";
- break;
- }
- case VK_ERROR_MEMORY_MAP_FAILED:
- {
- return "VK_ERROR_MEMORY_MAP_FAILED";
- break;
- }
- case VK_INCOMPLETE:
- {
- return "VK_INCOMPLETE";
- break;
- }
- case VK_ERROR_OUT_OF_HOST_MEMORY:
- {
- return "VK_ERROR_OUT_OF_HOST_MEMORY";
- break;
- }
- case VK_ERROR_INITIALIZATION_FAILED:
- {
- return "VK_ERROR_INITIALIZATION_FAILED";
- break;
- }
- case VK_NOT_READY:
- {
- return "VK_NOT_READY";
- break;
- }
- case VK_ERROR_OUT_OF_DEVICE_MEMORY:
- {
- return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
- break;
- }
- case VK_EVENT_SET:
- {
- return "VK_EVENT_SET";
- break;
- }
- case VK_TIMEOUT:
- {
- return "VK_TIMEOUT";
- break;
- }
- case VK_EVENT_RESET:
- {
- return "VK_EVENT_RESET";
- break;
- }
- case VK_SUCCESS:
- {
- return "VK_SUCCESS";
- break;
- }
- case VK_ERROR_EXTENSION_NOT_PRESENT:
- {
- return "VK_ERROR_EXTENSION_NOT_PRESENT";
- break;
- }
- case VK_ERROR_DEVICE_LOST:
- {
- return "VK_ERROR_DEVICE_LOST";
- break;
- }
- default:
- {
- return "unrecognized enumerator";
- break;
- }
+static std::string EnumeratorString(VkResult const &enumerator) {
+ switch (enumerator) {
+ case VK_RESULT_MAX_ENUM: {
+ return "VK_RESULT_MAX_ENUM";
+ break;
+ }
+ case VK_ERROR_LAYER_NOT_PRESENT: {
+ return "VK_ERROR_LAYER_NOT_PRESENT";
+ break;
+ }
+ case VK_ERROR_INCOMPATIBLE_DRIVER: {
+ return "VK_ERROR_INCOMPATIBLE_DRIVER";
+ break;
+ }
+ case VK_ERROR_MEMORY_MAP_FAILED: {
+ return "VK_ERROR_MEMORY_MAP_FAILED";
+ break;
+ }
+ case VK_INCOMPLETE: {
+ return "VK_INCOMPLETE";
+ break;
+ }
+ case VK_ERROR_OUT_OF_HOST_MEMORY: {
+ return "VK_ERROR_OUT_OF_HOST_MEMORY";
+ break;
+ }
+ case VK_ERROR_INITIALIZATION_FAILED: {
+ return "VK_ERROR_INITIALIZATION_FAILED";
+ break;
+ }
+ case VK_NOT_READY: {
+ return "VK_NOT_READY";
+ break;
+ }
+ case VK_ERROR_OUT_OF_DEVICE_MEMORY: {
+ return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
+ break;
+ }
+ case VK_EVENT_SET: {
+ return "VK_EVENT_SET";
+ break;
+ }
+ case VK_TIMEOUT: {
+ return "VK_TIMEOUT";
+ break;
+ }
+ case VK_EVENT_RESET: {
+ return "VK_EVENT_RESET";
+ break;
+ }
+ case VK_SUCCESS: {
+ return "VK_SUCCESS";
+ break;
+ }
+ case VK_ERROR_EXTENSION_NOT_PRESENT: {
+ return "VK_ERROR_EXTENSION_NOT_PRESENT";
+ break;
+ }
+ case VK_ERROR_DEVICE_LOST: {
+ return "VK_ERROR_DEVICE_LOST";
+ break;
+ }
+ default: {
+ return "unrecognized enumerator";
+ break;
+ }
}
}
-static
-bool ValidateEnumerator(VkFormatFeatureFlagBits const& enumerator)
-{
+static bool ValidateEnumerator(VkFormatFeatureFlagBits const &enumerator) {
VkFormatFeatureFlagBits allFlags = (VkFormatFeatureFlagBits)(
VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT |
VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT |
@@ -340,86 +303,66 @@
VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT |
VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT |
VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT |
- VK_FORMAT_FEATURE_BLIT_SRC_BIT |
- VK_FORMAT_FEATURE_BLIT_DST_BIT |
+ VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT |
VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkFormatFeatureFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkFormatFeatureFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT) {
strings.push_back("VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) {
strings.push_back("VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) {
strings.push_back("VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT) {
strings.push_back("VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
strings.push_back("VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT) {
strings.push_back("VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) {
strings.push_back("VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) {
strings.push_back("VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) {
strings.push_back("VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
strings.push_back("VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_BLIT_SRC_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_BLIT_SRC_BIT) {
strings.push_back("VK_FORMAT_FEATURE_BLIT_SRC_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_BLIT_DST_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_BLIT_DST_BIT) {
strings.push_back("VK_FORMAT_FEATURE_BLIT_DST_BIT");
}
- if(enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT)
- {
+ if (enumerator & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) {
strings.push_back("VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -427,74 +370,57 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkImageUsageFlagBits const& enumerator)
-{
- VkImageUsageFlagBits allFlags = (VkImageUsageFlagBits)(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
+static bool ValidateEnumerator(VkImageUsageFlagBits const &enumerator) {
+ VkImageUsageFlagBits allFlags = (VkImageUsageFlagBits)(
+ VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
- VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
- VK_IMAGE_USAGE_STORAGE_BIT |
- VK_IMAGE_USAGE_SAMPLED_BIT |
- VK_IMAGE_USAGE_TRANSFER_DST_BIT |
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
+ VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkImageUsageFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkImageUsageFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) {
strings.push_back("VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT");
}
- if(enumerator & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
strings.push_back("VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT");
}
- if(enumerator & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
strings.push_back("VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT");
}
- if(enumerator & VK_IMAGE_USAGE_STORAGE_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_STORAGE_BIT) {
strings.push_back("VK_IMAGE_USAGE_STORAGE_BIT");
}
- if(enumerator & VK_IMAGE_USAGE_SAMPLED_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_SAMPLED_BIT) {
strings.push_back("VK_IMAGE_USAGE_SAMPLED_BIT");
}
- if(enumerator & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_TRANSFER_DST_BIT) {
strings.push_back("VK_IMAGE_USAGE_TRANSFER_DST_BIT");
}
- if(enumerator & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT) {
strings.push_back("VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT");
}
- if(enumerator & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)
- {
+ if (enumerator & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) {
strings.push_back("VK_IMAGE_USAGE_TRANSFER_SRC_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -502,55 +428,41 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkQueueFlagBits const& enumerator)
-{
- VkQueueFlagBits allFlags = (VkQueueFlagBits)(
- VK_QUEUE_TRANSFER_BIT |
- VK_QUEUE_COMPUTE_BIT |
- VK_QUEUE_SPARSE_BINDING_BIT |
- VK_QUEUE_GRAPHICS_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkQueueFlagBits const &enumerator) {
+ VkQueueFlagBits allFlags =
+ (VkQueueFlagBits)(VK_QUEUE_TRANSFER_BIT | VK_QUEUE_COMPUTE_BIT |
+ VK_QUEUE_SPARSE_BINDING_BIT | VK_QUEUE_GRAPHICS_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkQueueFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkQueueFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_QUEUE_TRANSFER_BIT)
- {
+ if (enumerator & VK_QUEUE_TRANSFER_BIT) {
strings.push_back("VK_QUEUE_TRANSFER_BIT");
}
- if(enumerator & VK_QUEUE_COMPUTE_BIT)
- {
+ if (enumerator & VK_QUEUE_COMPUTE_BIT) {
strings.push_back("VK_QUEUE_COMPUTE_BIT");
}
- if(enumerator & VK_QUEUE_SPARSE_BINDING_BIT)
- {
+ if (enumerator & VK_QUEUE_SPARSE_BINDING_BIT) {
strings.push_back("VK_QUEUE_SPARSE_BINDING_BIT");
}
- if(enumerator & VK_QUEUE_GRAPHICS_BIT)
- {
+ if (enumerator & VK_QUEUE_GRAPHICS_BIT) {
strings.push_back("VK_QUEUE_GRAPHICS_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -558,59 +470,48 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkMemoryPropertyFlagBits const& enumerator)
-{
- VkMemoryPropertyFlagBits allFlags = (VkMemoryPropertyFlagBits)(VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT |
- VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
- VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
- VK_MEMORY_PROPERTY_HOST_CACHED_BIT |
- VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkMemoryPropertyFlagBits const &enumerator) {
+ VkMemoryPropertyFlagBits allFlags =
+ (VkMemoryPropertyFlagBits)(VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT |
+ VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
+ VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
+ VK_MEMORY_PROPERTY_HOST_CACHED_BIT |
+ VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkMemoryPropertyFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkMemoryPropertyFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
- {
+ if (enumerator & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) {
strings.push_back("VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT");
}
- if(enumerator & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
- {
+ if (enumerator & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
strings.push_back("VK_MEMORY_PROPERTY_HOST_COHERENT_BIT");
}
- if(enumerator & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
- {
+ if (enumerator & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
strings.push_back("VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT");
}
- if(enumerator & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
- {
+ if (enumerator & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
strings.push_back("VK_MEMORY_PROPERTY_HOST_CACHED_BIT");
}
- if(enumerator & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
- {
+ if (enumerator & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
strings.push_back("VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -618,39 +519,31 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkMemoryHeapFlagBits const& enumerator)
-{
- VkMemoryHeapFlagBits allFlags = (VkMemoryHeapFlagBits)(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkMemoryHeapFlagBits const &enumerator) {
+ VkMemoryHeapFlagBits allFlags =
+ (VkMemoryHeapFlagBits)(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkMemoryHeapFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkMemoryHeapFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
- {
+ if (enumerator & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) {
strings.push_back("VK_MEMORY_HEAP_DEVICE_LOCAL_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -658,49 +551,40 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkSparseImageFormatFlagBits const& enumerator)
-{
- VkSparseImageFormatFlagBits allFlags = (VkSparseImageFormatFlagBits)(VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT |
+static bool ValidateEnumerator(VkSparseImageFormatFlagBits const &enumerator) {
+ VkSparseImageFormatFlagBits allFlags = (VkSparseImageFormatFlagBits)(
+ VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT |
VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT |
VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkSparseImageFormatFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkSparseImageFormatFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT)
- {
+ if (enumerator & VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT) {
strings.push_back("VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT");
}
- if(enumerator & VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT)
- {
+ if (enumerator & VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT) {
strings.push_back("VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT");
}
- if(enumerator & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT)
- {
+ if (enumerator & VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT) {
strings.push_back("VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -708,39 +592,31 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkFenceCreateFlagBits const& enumerator)
-{
- VkFenceCreateFlagBits allFlags = (VkFenceCreateFlagBits)(VK_FENCE_CREATE_SIGNALED_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkFenceCreateFlagBits const &enumerator) {
+ VkFenceCreateFlagBits allFlags =
+ (VkFenceCreateFlagBits)(VK_FENCE_CREATE_SIGNALED_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkFenceCreateFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkFenceCreateFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_FENCE_CREATE_SIGNALED_BIT)
- {
+ if (enumerator & VK_FENCE_CREATE_SIGNALED_BIT) {
strings.push_back("VK_FENCE_CREATE_SIGNALED_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -748,10 +624,10 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkQueryPipelineStatisticFlagBits const& enumerator)
-{
- VkQueryPipelineStatisticFlagBits allFlags = (VkQueryPipelineStatisticFlagBits)(VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT |
+static bool
+ValidateEnumerator(VkQueryPipelineStatisticFlagBits const &enumerator) {
+ VkQueryPipelineStatisticFlagBits allFlags = (VkQueryPipelineStatisticFlagBits)(
+ VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT |
VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT |
VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT |
VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT |
@@ -762,75 +638,78 @@
VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT |
VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT |
VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkQueryPipelineStatisticFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkQueryPipelineStatisticFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT");
+ if (enumerator & VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT");
+ if (enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT");
+ if (enumerator & VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT) {
+ strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_"
+ "SHADER_PATCHES_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT) {
+ strings.push_back(
+ "VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT");
}
- if(enumerator & VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT)
- {
- strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT");
+ if (enumerator &
+ VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT) {
+ strings.push_back("VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_"
+ "SHADER_INVOCATIONS_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -838,54 +717,41 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkQueryResultFlagBits const& enumerator)
-{
- VkQueryResultFlagBits allFlags = (VkQueryResultFlagBits)(VK_QUERY_RESULT_PARTIAL_BIT |
- VK_QUERY_RESULT_WITH_AVAILABILITY_BIT |
- VK_QUERY_RESULT_WAIT_BIT |
- VK_QUERY_RESULT_64_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkQueryResultFlagBits const &enumerator) {
+ VkQueryResultFlagBits allFlags = (VkQueryResultFlagBits)(
+ VK_QUERY_RESULT_PARTIAL_BIT | VK_QUERY_RESULT_WITH_AVAILABILITY_BIT |
+ VK_QUERY_RESULT_WAIT_BIT | VK_QUERY_RESULT_64_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkQueryResultFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkQueryResultFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_QUERY_RESULT_PARTIAL_BIT)
- {
+ if (enumerator & VK_QUERY_RESULT_PARTIAL_BIT) {
strings.push_back("VK_QUERY_RESULT_PARTIAL_BIT");
}
- if(enumerator & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT)
- {
+ if (enumerator & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) {
strings.push_back("VK_QUERY_RESULT_WITH_AVAILABILITY_BIT");
}
- if(enumerator & VK_QUERY_RESULT_WAIT_BIT)
- {
+ if (enumerator & VK_QUERY_RESULT_WAIT_BIT) {
strings.push_back("VK_QUERY_RESULT_WAIT_BIT");
}
- if(enumerator & VK_QUERY_RESULT_64_BIT)
- {
+ if (enumerator & VK_QUERY_RESULT_64_BIT) {
strings.push_back("VK_QUERY_RESULT_64_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -893,79 +759,60 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkBufferUsageFlagBits const& enumerator)
-{
- VkBufferUsageFlagBits allFlags = (VkBufferUsageFlagBits)(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
- VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
+static bool ValidateEnumerator(VkBufferUsageFlagBits const &enumerator) {
+ VkBufferUsageFlagBits allFlags = (VkBufferUsageFlagBits)(
+ VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT |
VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT |
VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
- VK_BUFFER_USAGE_STORAGE_BUFFER_BIT |
- VK_BUFFER_USAGE_TRANSFER_DST_BIT |
+ VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT |
VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
- VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
- VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
- if(enumerator & (~allFlags))
- {
+ VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkBufferUsageFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkBufferUsageFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) {
strings.push_back("VK_BUFFER_USAGE_VERTEX_BUFFER_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) {
strings.push_back("VK_BUFFER_USAGE_INDEX_BUFFER_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT) {
strings.push_back("VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) {
strings.push_back("VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) {
strings.push_back("VK_BUFFER_USAGE_STORAGE_BUFFER_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_TRANSFER_DST_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_TRANSFER_DST_BIT) {
strings.push_back("VK_BUFFER_USAGE_TRANSFER_DST_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) {
strings.push_back("VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_TRANSFER_SRC_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_TRANSFER_SRC_BIT) {
strings.push_back("VK_BUFFER_USAGE_TRANSFER_SRC_BIT");
}
- if(enumerator & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)
- {
+ if (enumerator & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) {
strings.push_back("VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -973,49 +820,39 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkBufferCreateFlagBits const& enumerator)
-{
- VkBufferCreateFlagBits allFlags = (VkBufferCreateFlagBits)(VK_BUFFER_CREATE_SPARSE_ALIASED_BIT |
- VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT |
- VK_BUFFER_CREATE_SPARSE_BINDING_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkBufferCreateFlagBits const &enumerator) {
+ VkBufferCreateFlagBits allFlags =
+ (VkBufferCreateFlagBits)(VK_BUFFER_CREATE_SPARSE_ALIASED_BIT |
+ VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT |
+ VK_BUFFER_CREATE_SPARSE_BINDING_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkBufferCreateFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkBufferCreateFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT)
- {
+ if (enumerator & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT) {
strings.push_back("VK_BUFFER_CREATE_SPARSE_ALIASED_BIT");
}
- if(enumerator & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT)
- {
+ if (enumerator & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) {
strings.push_back("VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT");
}
- if(enumerator & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)
- {
+ if (enumerator & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) {
strings.push_back("VK_BUFFER_CREATE_SPARSE_BINDING_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1023,59 +860,47 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkImageCreateFlagBits const& enumerator)
-{
- VkImageCreateFlagBits allFlags = (VkImageCreateFlagBits)(VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT |
- VK_IMAGE_CREATE_SPARSE_ALIASED_BIT |
- VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT |
- VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
- VK_IMAGE_CREATE_SPARSE_BINDING_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkImageCreateFlagBits const &enumerator) {
+ VkImageCreateFlagBits allFlags =
+ (VkImageCreateFlagBits)(VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT |
+ VK_IMAGE_CREATE_SPARSE_ALIASED_BIT |
+ VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT |
+ VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
+ VK_IMAGE_CREATE_SPARSE_BINDING_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkImageCreateFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkImageCreateFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
- {
+ if (enumerator & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) {
strings.push_back("VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT");
}
- if(enumerator & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT)
- {
+ if (enumerator & VK_IMAGE_CREATE_SPARSE_ALIASED_BIT) {
strings.push_back("VK_IMAGE_CREATE_SPARSE_ALIASED_BIT");
}
- if(enumerator & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT)
- {
+ if (enumerator & VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT) {
strings.push_back("VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT");
}
- if(enumerator & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT)
- {
+ if (enumerator & VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT) {
strings.push_back("VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT");
}
- if(enumerator & VK_IMAGE_CREATE_SPARSE_BINDING_BIT)
- {
+ if (enumerator & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
strings.push_back("VK_IMAGE_CREATE_SPARSE_BINDING_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1083,54 +908,42 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkColorComponentFlagBits const& enumerator)
-{
- VkColorComponentFlagBits allFlags = (VkColorComponentFlagBits)(VK_COLOR_COMPONENT_A_BIT |
- VK_COLOR_COMPONENT_B_BIT |
- VK_COLOR_COMPONENT_G_BIT |
- VK_COLOR_COMPONENT_R_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkColorComponentFlagBits const &enumerator) {
+ VkColorComponentFlagBits allFlags = (VkColorComponentFlagBits)(
+ VK_COLOR_COMPONENT_A_BIT | VK_COLOR_COMPONENT_B_BIT |
+ VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_R_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkColorComponentFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkColorComponentFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_COLOR_COMPONENT_A_BIT)
- {
+ if (enumerator & VK_COLOR_COMPONENT_A_BIT) {
strings.push_back("VK_COLOR_COMPONENT_A_BIT");
}
- if(enumerator & VK_COLOR_COMPONENT_B_BIT)
- {
+ if (enumerator & VK_COLOR_COMPONENT_B_BIT) {
strings.push_back("VK_COLOR_COMPONENT_B_BIT");
}
- if(enumerator & VK_COLOR_COMPONENT_G_BIT)
- {
+ if (enumerator & VK_COLOR_COMPONENT_G_BIT) {
strings.push_back("VK_COLOR_COMPONENT_G_BIT");
}
- if(enumerator & VK_COLOR_COMPONENT_R_BIT)
- {
+ if (enumerator & VK_COLOR_COMPONENT_R_BIT) {
strings.push_back("VK_COLOR_COMPONENT_R_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1138,49 +951,40 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkPipelineCreateFlagBits const& enumerator)
-{
- VkPipelineCreateFlagBits allFlags = (VkPipelineCreateFlagBits)(VK_PIPELINE_CREATE_DERIVATIVE_BIT |
- VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT |
- VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkPipelineCreateFlagBits const &enumerator) {
+ VkPipelineCreateFlagBits allFlags =
+ (VkPipelineCreateFlagBits)(VK_PIPELINE_CREATE_DERIVATIVE_BIT |
+ VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT |
+ VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkPipelineCreateFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkPipelineCreateFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_PIPELINE_CREATE_DERIVATIVE_BIT)
- {
+ if (enumerator & VK_PIPELINE_CREATE_DERIVATIVE_BIT) {
strings.push_back("VK_PIPELINE_CREATE_DERIVATIVE_BIT");
}
- if(enumerator & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)
- {
+ if (enumerator & VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT) {
strings.push_back("VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT");
}
- if(enumerator & VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT)
- {
+ if (enumerator & VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT) {
strings.push_back("VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1188,69 +992,52 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkShaderStageFlagBits const& enumerator)
-{
- VkShaderStageFlagBits allFlags = (VkShaderStageFlagBits)(VK_SHADER_STAGE_ALL |
- VK_SHADER_STAGE_FRAGMENT_BIT |
- VK_SHADER_STAGE_GEOMETRY_BIT |
- VK_SHADER_STAGE_COMPUTE_BIT |
+static bool ValidateEnumerator(VkShaderStageFlagBits const &enumerator) {
+ VkShaderStageFlagBits allFlags = (VkShaderStageFlagBits)(
+ VK_SHADER_STAGE_ALL | VK_SHADER_STAGE_FRAGMENT_BIT |
+ VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_COMPUTE_BIT |
VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT |
- VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |
- VK_SHADER_STAGE_VERTEX_BIT);
- if(enumerator & (~allFlags))
- {
+ VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_VERTEX_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkShaderStageFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkShaderStageFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_SHADER_STAGE_ALL)
- {
+ if (enumerator & VK_SHADER_STAGE_ALL) {
strings.push_back("VK_SHADER_STAGE_ALL");
}
- if(enumerator & VK_SHADER_STAGE_FRAGMENT_BIT)
- {
+ if (enumerator & VK_SHADER_STAGE_FRAGMENT_BIT) {
strings.push_back("VK_SHADER_STAGE_FRAGMENT_BIT");
}
- if(enumerator & VK_SHADER_STAGE_GEOMETRY_BIT)
- {
+ if (enumerator & VK_SHADER_STAGE_GEOMETRY_BIT) {
strings.push_back("VK_SHADER_STAGE_GEOMETRY_BIT");
}
- if(enumerator & VK_SHADER_STAGE_COMPUTE_BIT)
- {
+ if (enumerator & VK_SHADER_STAGE_COMPUTE_BIT) {
strings.push_back("VK_SHADER_STAGE_COMPUTE_BIT");
}
- if(enumerator & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
- {
+ if (enumerator & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) {
strings.push_back("VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT");
}
- if(enumerator & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
- {
+ if (enumerator & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
strings.push_back("VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT");
}
- if(enumerator & VK_SHADER_STAGE_VERTEX_BIT)
- {
+ if (enumerator & VK_SHADER_STAGE_VERTEX_BIT) {
strings.push_back("VK_SHADER_STAGE_VERTEX_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1258,15 +1045,11 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkPipelineStageFlagBits const& enumerator)
-{
+static bool ValidateEnumerator(VkPipelineStageFlagBits const &enumerator) {
VkPipelineStageFlagBits allFlags = (VkPipelineStageFlagBits)(
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT |
- VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT |
- VK_PIPELINE_STAGE_HOST_BIT |
- VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT |
- VK_PIPELINE_STAGE_TRANSFER_BIT |
+ VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | VK_PIPELINE_STAGE_HOST_BIT |
+ VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT |
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT |
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT |
VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT |
@@ -1279,99 +1062,77 @@
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT |
VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT |
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkPipelineStageFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkPipelineStageFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_ALL_COMMANDS_BIT) {
strings.push_back("VK_PIPELINE_STAGE_ALL_COMMANDS_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT) {
strings.push_back("VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_HOST_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_HOST_BIT) {
strings.push_back("VK_PIPELINE_STAGE_HOST_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_TRANSFER_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_TRANSFER_BIT) {
strings.push_back("VK_PIPELINE_STAGE_TRANSFER_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT) {
strings.push_back("VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT) {
strings.push_back("VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT) {
strings.push_back("VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT) {
strings.push_back("VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT) {
strings.push_back("VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT) {
strings.push_back("VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT) {
strings.push_back("VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_VERTEX_SHADER_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_VERTEX_SHADER_BIT) {
strings.push_back("VK_PIPELINE_STAGE_VERTEX_SHADER_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT)
- {
- strings.push_back("VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT");
+ if (enumerator & VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT) {
+ strings.push_back(
+ "VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_VERTEX_INPUT_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_VERTEX_INPUT_BIT) {
strings.push_back("VK_PIPELINE_STAGE_VERTEX_INPUT_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT) {
strings.push_back("VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT) {
strings.push_back("VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT");
}
- if(enumerator & VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT)
- {
+ if (enumerator & VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT) {
strings.push_back("VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1379,121 +1140,89 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkAccessFlagBits const& enumerator)
-{
+static bool ValidateEnumerator(VkAccessFlagBits const &enumerator) {
VkAccessFlagBits allFlags = (VkAccessFlagBits)(
- VK_ACCESS_INDIRECT_COMMAND_READ_BIT |
- VK_ACCESS_INDEX_READ_BIT |
- VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT |
- VK_ACCESS_UNIFORM_READ_BIT |
- VK_ACCESS_INPUT_ATTACHMENT_READ_BIT |
- VK_ACCESS_SHADER_READ_BIT |
- VK_ACCESS_SHADER_WRITE_BIT |
- VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
- VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
- VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
- VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
- VK_ACCESS_TRANSFER_READ_BIT |
- VK_ACCESS_TRANSFER_WRITE_BIT |
- VK_ACCESS_HOST_READ_BIT |
- VK_ACCESS_HOST_WRITE_BIT |
- VK_ACCESS_MEMORY_READ_BIT |
- VK_ACCESS_MEMORY_WRITE_BIT);
+ VK_ACCESS_INDIRECT_COMMAND_READ_BIT | VK_ACCESS_INDEX_READ_BIT |
+ VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT |
+ VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT |
+ VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
+ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
+ VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT |
+ VK_ACCESS_HOST_READ_BIT | VK_ACCESS_HOST_WRITE_BIT |
+ VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkAccessFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkAccessFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_ACCESS_INDIRECT_COMMAND_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_INDIRECT_COMMAND_READ_BIT) {
strings.push_back("VK_ACCESS_INDIRECT_COMMAND_READ_BIT");
}
- if(enumerator & VK_ACCESS_INDEX_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_INDEX_READ_BIT) {
strings.push_back("VK_ACCESS_INDEX_READ_BIT");
}
- if(enumerator & VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT) {
strings.push_back("VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT");
}
- if(enumerator & VK_ACCESS_UNIFORM_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_UNIFORM_READ_BIT) {
strings.push_back("VK_ACCESS_UNIFORM_READ_BIT");
}
- if(enumerator & VK_ACCESS_INPUT_ATTACHMENT_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_INPUT_ATTACHMENT_READ_BIT) {
strings.push_back("VK_ACCESS_INPUT_ATTACHMENT_READ_BIT");
}
- if(enumerator & VK_ACCESS_SHADER_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_SHADER_READ_BIT) {
strings.push_back("VK_ACCESS_SHADER_READ_BIT");
}
- if(enumerator & VK_ACCESS_SHADER_WRITE_BIT)
- {
+ if (enumerator & VK_ACCESS_SHADER_WRITE_BIT) {
strings.push_back("VK_ACCESS_SHADER_WRITE_BIT");
}
- if(enumerator & VK_ACCESS_COLOR_ATTACHMENT_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_COLOR_ATTACHMENT_READ_BIT) {
strings.push_back("VK_ACCESS_COLOR_ATTACHMENT_READ_BIT");
}
- if(enumerator & VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT)
- {
+ if (enumerator & VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT) {
strings.push_back("VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT");
}
- if(enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT) {
strings.push_back("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT");
}
- if(enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT)
- {
+ if (enumerator & VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT) {
strings.push_back("VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT");
}
- if(enumerator & VK_ACCESS_TRANSFER_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_TRANSFER_READ_BIT) {
strings.push_back("VK_ACCESS_TRANSFER_READ_BIT");
}
- if(enumerator & VK_ACCESS_TRANSFER_WRITE_BIT)
- {
+ if (enumerator & VK_ACCESS_TRANSFER_WRITE_BIT) {
strings.push_back("VK_ACCESS_TRANSFER_WRITE_BIT");
}
- if(enumerator & VK_ACCESS_HOST_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_HOST_READ_BIT) {
strings.push_back("VK_ACCESS_HOST_READ_BIT");
}
- if(enumerator & VK_ACCESS_HOST_WRITE_BIT)
- {
+ if (enumerator & VK_ACCESS_HOST_WRITE_BIT) {
strings.push_back("VK_ACCESS_HOST_WRITE_BIT");
}
- if(enumerator & VK_ACCESS_MEMORY_READ_BIT)
- {
+ if (enumerator & VK_ACCESS_MEMORY_READ_BIT) {
strings.push_back("VK_ACCESS_MEMORY_READ_BIT");
}
- if(enumerator & VK_ACCESS_MEMORY_WRITE_BIT)
- {
+ if (enumerator & VK_ACCESS_MEMORY_WRITE_BIT) {
strings.push_back("VK_ACCESS_MEMORY_WRITE_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1501,44 +1230,36 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkCommandPoolCreateFlagBits const& enumerator)
-{
- VkCommandPoolCreateFlagBits allFlags = (VkCommandPoolCreateFlagBits)(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT |
+static bool ValidateEnumerator(VkCommandPoolCreateFlagBits const &enumerator) {
+ VkCommandPoolCreateFlagBits allFlags = (VkCommandPoolCreateFlagBits)(
+ VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT |
VK_COMMAND_POOL_CREATE_TRANSIENT_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkCommandPoolCreateFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkCommandPoolCreateFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT)
- {
+ if (enumerator & VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT) {
strings.push_back("VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT");
}
- if(enumerator & VK_COMMAND_POOL_CREATE_TRANSIENT_BIT)
- {
+ if (enumerator & VK_COMMAND_POOL_CREATE_TRANSIENT_BIT) {
strings.push_back("VK_COMMAND_POOL_CREATE_TRANSIENT_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1546,39 +1267,32 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkCommandPoolResetFlagBits const& enumerator)
-{
- VkCommandPoolResetFlagBits allFlags = (VkCommandPoolResetFlagBits)(VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkCommandPoolResetFlagBits const &enumerator) {
+ VkCommandPoolResetFlagBits allFlags = (VkCommandPoolResetFlagBits)(
+ VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkCommandPoolResetFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkCommandPoolResetFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT)
- {
+ if (enumerator & VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT) {
strings.push_back("VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1586,49 +1300,40 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkCommandBufferUsageFlags const& enumerator)
-{
- VkCommandBufferUsageFlags allFlags = (VkCommandBufferUsageFlags)(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT |
+static bool ValidateEnumerator(VkCommandBufferUsageFlags const &enumerator) {
+ VkCommandBufferUsageFlags allFlags = (VkCommandBufferUsageFlags)(
+ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT |
VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT |
VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT);
- if(enumerator & (~allFlags))
- {
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkCommandBufferUsageFlags const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkCommandBufferUsageFlags const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)
- {
+ if (enumerator & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
strings.push_back("VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT");
}
- if(enumerator & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT)
- {
+ if (enumerator & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) {
strings.push_back("VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT");
}
- if(enumerator & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)
- {
+ if (enumerator & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT) {
strings.push_back("VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1636,39 +1341,32 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkCommandBufferResetFlagBits const& enumerator)
-{
- VkCommandBufferResetFlagBits allFlags = (VkCommandBufferResetFlagBits)(VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkCommandBufferResetFlagBits const &enumerator) {
+ VkCommandBufferResetFlagBits allFlags = (VkCommandBufferResetFlagBits)(
+ VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkCommandBufferResetFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string
+EnumeratorString(VkCommandBufferResetFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT)
- {
+ if (enumerator & VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT) {
strings.push_back("VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1676,54 +1374,41 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkImageAspectFlagBits const& enumerator)
-{
- VkImageAspectFlagBits allFlags = (VkImageAspectFlagBits)(VK_IMAGE_ASPECT_METADATA_BIT |
- VK_IMAGE_ASPECT_STENCIL_BIT |
- VK_IMAGE_ASPECT_DEPTH_BIT |
- VK_IMAGE_ASPECT_COLOR_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkImageAspectFlagBits const &enumerator) {
+ VkImageAspectFlagBits allFlags = (VkImageAspectFlagBits)(
+ VK_IMAGE_ASPECT_METADATA_BIT | VK_IMAGE_ASPECT_STENCIL_BIT |
+ VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_COLOR_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkImageAspectFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkImageAspectFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_IMAGE_ASPECT_METADATA_BIT)
- {
+ if (enumerator & VK_IMAGE_ASPECT_METADATA_BIT) {
strings.push_back("VK_IMAGE_ASPECT_METADATA_BIT");
}
- if(enumerator & VK_IMAGE_ASPECT_STENCIL_BIT)
- {
+ if (enumerator & VK_IMAGE_ASPECT_STENCIL_BIT) {
strings.push_back("VK_IMAGE_ASPECT_STENCIL_BIT");
}
- if(enumerator & VK_IMAGE_ASPECT_DEPTH_BIT)
- {
+ if (enumerator & VK_IMAGE_ASPECT_DEPTH_BIT) {
strings.push_back("VK_IMAGE_ASPECT_DEPTH_BIT");
}
- if(enumerator & VK_IMAGE_ASPECT_COLOR_BIT)
- {
+ if (enumerator & VK_IMAGE_ASPECT_COLOR_BIT) {
strings.push_back("VK_IMAGE_ASPECT_COLOR_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1731,39 +1416,31 @@
return enumeratorString;
}
-static
-bool ValidateEnumerator(VkQueryControlFlagBits const& enumerator)
-{
- VkQueryControlFlagBits allFlags = (VkQueryControlFlagBits)(VK_QUERY_CONTROL_PRECISE_BIT);
- if(enumerator & (~allFlags))
- {
+static bool ValidateEnumerator(VkQueryControlFlagBits const &enumerator) {
+ VkQueryControlFlagBits allFlags =
+ (VkQueryControlFlagBits)(VK_QUERY_CONTROL_PRECISE_BIT);
+ if (enumerator & (~allFlags)) {
return false;
}
return true;
}
-static
-std::string EnumeratorString(VkQueryControlFlagBits const& enumerator)
-{
- if(!ValidateEnumerator(enumerator))
- {
+static std::string EnumeratorString(VkQueryControlFlagBits const &enumerator) {
+ if (!ValidateEnumerator(enumerator)) {
return "unrecognized enumerator";
}
std::vector<std::string> strings;
- if(enumerator & VK_QUERY_CONTROL_PRECISE_BIT)
- {
+ if (enumerator & VK_QUERY_CONTROL_PRECISE_BIT) {
strings.push_back("VK_QUERY_CONTROL_PRECISE_BIT");
}
std::string enumeratorString;
- for(auto const& string : strings)
- {
+ for (auto const &string : strings) {
enumeratorString += string;
- if(string != strings.back())
- {
+ if (string != strings.back()) {
enumeratorString += '|';
}
}
@@ -1771,16 +1448,18 @@
return enumeratorString;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
- const VkInstanceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkInstance* pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkInstance *pInstance) {
+ VkLayerInstanceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkCreateInstance fpCreateInstance =
+ (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if (fpCreateInstance == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -1792,27 +1471,27 @@
if (result != VK_SUCCESS)
return result;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
- VkLayerInstanceDispatchTable *pTable = initInstanceTable(*pInstance, fpGetInstanceProcAddr, pc_instance_table_map);
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
+ VkLayerInstanceDispatchTable *pTable = initInstanceTable(
+ *pInstance, fpGetInstanceProcAddr, pc_instance_table_map);
my_data->report_data = debug_report_create_instance(
- pTable,
- *pInstance,
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
+ pTable, *pInstance, pCreateInfo->enabledExtensionCount,
+ pCreateInfo->ppEnabledExtensionNames);
InitParamChecker(my_data, pAllocator);
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
- VkInstance instance,
- const VkAllocationCallbacks* pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyInstance(VkInstance instance,
+ const VkAllocationCallbacks *pAllocator) {
// Grab the key before the instance is destroyed.
dispatch_key key = get_dispatch_key(instance);
- VkLayerInstanceDispatchTable *pTable = get_dispatch_table(pc_instance_table_map, instance);
+ VkLayerInstanceDispatchTable *pTable =
+ get_dispatch_table(pc_instance_table_map, instance);
pTable->DestroyInstance(instance, pAllocator);
// Clean up logging callback, if any
@@ -1829,223 +1508,213 @@
pc_instance_table_map.erase(key);
}
-bool PostEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices,
- VkResult result)
-{
+bool PostEnumeratePhysicalDevices(VkInstance instance,
+ uint32_t *pPhysicalDeviceCount,
+ VkPhysicalDevice *pPhysicalDevices,
+ VkResult result) {
- if(pPhysicalDeviceCount != nullptr)
- {
+ if (pPhysicalDeviceCount != nullptr) {
}
- if(pPhysicalDevices != nullptr)
- {
+ if (pPhysicalDevices != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkEnumeratePhysicalDevices parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mid(instance), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkEnumeratePhysicalDevices parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mid(instance), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(
- VkInstance instance,
- uint32_t* pPhysicalDeviceCount,
- VkPhysicalDevice* pPhysicalDevices)
-{
- VkResult result = get_dispatch_table(pc_instance_table_map, instance)->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumeratePhysicalDevices(VkInstance instance,
+ uint32_t *pPhysicalDeviceCount,
+ VkPhysicalDevice *pPhysicalDevices) {
+ VkResult result = get_dispatch_table(pc_instance_table_map, instance)
+ ->EnumeratePhysicalDevices(
+ instance, pPhysicalDeviceCount, pPhysicalDevices);
- PostEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices, result);
+ PostEnumeratePhysicalDevices(instance, pPhysicalDeviceCount,
+ pPhysicalDevices, result);
return result;
}
-bool PostGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures)
-{
+bool PostGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures *pFeatures) {
- if(pFeatures != nullptr)
- {
+ if (pFeatures != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceFeatures* pFeatures)
-{
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceFeatures *pFeatures) {
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceFeatures(physicalDevice, pFeatures);
PostGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
}
-bool PostGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties)
-{
+bool
+PostGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,
+ VkFormat format,
+ VkFormatProperties *pFormatProperties) {
- if(format < VK_FORMAT_BEGIN_RANGE ||
- format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceFormatProperties parameter, VkFormat format, is an unrecognized enumerator");
+ if (format < VK_FORMAT_BEGIN_RANGE || format > VK_FORMAT_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceFormatProperties parameter, VkFormat "
+ "format, is an unrecognized enumerator");
return false;
}
- if(pFormatProperties != nullptr)
- {
+ if (pFormatProperties != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkFormatProperties* pFormatProperties)
-{
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,
+ VkFormat format,
+ VkFormatProperties *pFormatProperties) {
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceFormatProperties(physicalDevice, format,
+ pFormatProperties);
- PostGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+ PostGetPhysicalDeviceFormatProperties(physicalDevice, format,
+ pFormatProperties);
}
bool PostGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties,
- VkResult result)
-{
+ VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+ VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+ VkImageFormatProperties *pImageFormatProperties, VkResult result) {
- if(format < VK_FORMAT_BEGIN_RANGE ||
- format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceImageFormatProperties parameter, VkFormat format, is an unrecognized enumerator");
+ if (format < VK_FORMAT_BEGIN_RANGE || format > VK_FORMAT_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceImageFormatProperties parameter, VkFormat "
+ "format, is an unrecognized enumerator");
return false;
}
- if(type < VK_IMAGE_TYPE_BEGIN_RANGE ||
- type > VK_IMAGE_TYPE_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceImageFormatProperties parameter, VkImageType type, is an unrecognized enumerator");
+ if (type < VK_IMAGE_TYPE_BEGIN_RANGE || type > VK_IMAGE_TYPE_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceImageFormatProperties parameter, "
+ "VkImageType type, is an unrecognized enumerator");
return false;
}
- if(tiling < VK_IMAGE_TILING_BEGIN_RANGE ||
- tiling > VK_IMAGE_TILING_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceImageFormatProperties parameter, VkImageTiling tiling, is an unrecognized enumerator");
+ if (tiling < VK_IMAGE_TILING_BEGIN_RANGE ||
+ tiling > VK_IMAGE_TILING_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceImageFormatProperties parameter, "
+ "VkImageTiling tiling, is an unrecognized enumerator");
return false;
}
-
- if(pImageFormatProperties != nullptr)
- {
+ if (pImageFormatProperties != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkGetPhysicalDeviceImageFormatProperties parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkGetPhysicalDeviceImageFormatProperties "
+ "parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkImageTiling tiling,
- VkImageUsageFlags usage,
- VkImageCreateFlags flags,
- VkImageFormatProperties* pImageFormatProperties)
-{
- VkResult result = get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPhysicalDeviceImageFormatProperties(
+ VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+ VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
+ VkImageFormatProperties *pImageFormatProperties) {
+ VkResult result = get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceImageFormatProperties(
+ physicalDevice, format, type, tiling, usage,
+ flags, pImageFormatProperties);
- PostGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties, result);
+ PostGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type,
+ tiling, usage, flags,
+ pImageFormatProperties, result);
return result;
}
-bool PostGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties)
-{
+bool PostGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceProperties *pProperties) {
- if(pProperties != nullptr)
- {
- if(pProperties->deviceType < VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE ||
- pProperties->deviceType > VK_PHYSICAL_DEVICE_TYPE_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceProperties parameter, VkPhysicalDeviceType pProperties->deviceType, is an unrecognized enumerator");
- return false;
- }
+ if (pProperties != nullptr) {
+ if (pProperties->deviceType < VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE ||
+ pProperties->deviceType > VK_PHYSICAL_DEVICE_TYPE_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceProperties parameter, "
+ "VkPhysicalDeviceType pProperties->deviceType, is an "
+ "unrecognized enumerator");
+ return false;
+ }
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(
- VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceProperties* pProperties)
-{
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceProperties(physicalDevice, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
+ VkPhysicalDeviceProperties *pProperties) {
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceProperties(physicalDevice, pProperties);
PostGetPhysicalDeviceProperties(physicalDevice, pProperties);
}
bool PostGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCount,
- VkQueueFamilyProperties* pQueueProperties)
-{
+ VkPhysicalDevice physicalDevice, uint32_t *pCount,
+ VkQueueFamilyProperties *pQueueProperties) {
- if(pQueueProperties == nullptr && pCount != nullptr)
- {
+ if (pQueueProperties == nullptr && pCount != nullptr) {
}
- if(pQueueProperties != nullptr)
- {
+ if (pQueueProperties != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pCount,
- VkQueueFamilyProperties* pQueueProperties)
-{
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pCount, pQueueProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceQueueFamilyProperties(
+ VkPhysicalDevice physicalDevice, uint32_t *pCount,
+ VkQueueFamilyProperties *pQueueProperties) {
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pCount,
+ pQueueProperties);
- PostGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pCount, pQueueProperties);
+ PostGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pCount,
+ pQueueProperties);
}
bool PostGetPhysicalDeviceMemoryProperties(
VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties)
-{
+ VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
- if(pMemoryProperties != nullptr)
- {
+ if (pMemoryProperties != nullptr) {
}
return true;
@@ -2053,66 +1722,99 @@
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
VkPhysicalDevice physicalDevice,
- VkPhysicalDeviceMemoryProperties* pMemoryProperties)
-{
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+ VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
PostGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
}
-void validateDeviceCreateInfo(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const std::vector<VkQueueFamilyProperties> properties) {
+void validateDeviceCreateInfo(
+ VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+ const std::vector<VkQueueFamilyProperties> properties) {
std::unordered_set<uint32_t> set;
for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
if (set.count(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex)) {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueFamilyIndex, is not unique within this structure.", i);
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "VkDeviceCreateInfo parameter, uint32_t "
+ "pQueueCreateInfos[%d]->queueFamilyIndex, is not unique "
+ "within this structure.",
+ i);
} else {
set.insert(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex);
}
if (pCreateInfo->pQueueCreateInfos[i].queueCount == 0) {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueCount, cannot be zero.", i);
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "VkDeviceCreateInfo parameter, uint32_t "
+ "pQueueCreateInfos[%d]->queueCount, cannot be zero.",
+ i);
}
- for (uint32_t j = 0; j < pCreateInfo->pQueueCreateInfos[i].queueCount; ++j) {
- if (pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] < 0.f || pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] > 1.f) {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->pQueuePriorities[%d], must be between 0 and 1. Actual value is %f", i, j, pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j]);
+ for (uint32_t j = 0; j < pCreateInfo->pQueueCreateInfos[i].queueCount;
+ ++j) {
+ if (pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] < 0.f ||
+ pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j] > 1.f) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "VkDeviceCreateInfo parameter, uint32_t "
+ "pQueueCreateInfos[%d]->pQueuePriorities["
+ "%d], must be between 0 and 1. Actual "
+ "value is %f",
+ i, j,
+ pCreateInfo->pQueueCreateInfos[i].pQueuePriorities[j]);
}
}
- if (pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex >= properties.size()) {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueFamilyIndex cannot be more than the number of queue families.", i);
- } else if (pCreateInfo->pQueueCreateInfos[i].queueCount > properties[pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex].queueCount) {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "VkDeviceCreateInfo parameter, uint32_t pQueueCreateInfos[%d]->queueCount cannot be more than the number of queues for the given family index.", i);
+ if (pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex >=
+ properties.size()) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "VkDeviceCreateInfo parameter, uint32_t "
+ "pQueueCreateInfos[%d]->queueFamilyIndex cannot be more "
+ "than the number of queue families.",
+ i);
+ } else if (pCreateInfo->pQueueCreateInfos[i].queueCount >
+ properties[pCreateInfo->pQueueCreateInfos[i]
+ .queueFamilyIndex].queueCount) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "VkDeviceCreateInfo parameter, uint32_t "
+ "pQueueCreateInfos[%d]->queueCount cannot be more than the "
+ "number of queues for the given family index.",
+ i);
}
}
}
-void storeCreateDeviceData(VkDevice device, const VkDeviceCreateInfo* pCreateInfo) {
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+void storeCreateDeviceData(VkDevice device,
+ const VkDeviceCreateInfo *pCreateInfo) {
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
my_device_data->queueFamilyIndexMap.insert(
- std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex, pCreateInfo->pQueueCreateInfos[i].queueCount));
+ std::make_pair(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex,
+ pCreateInfo->pQueueCreateInfos[i].queueCount));
}
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
- VkPhysicalDevice physicalDevice,
- const VkDeviceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDevice* pDevice)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDevice(VkPhysicalDevice physicalDevice,
+ const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
/*
* NOTE: We do not validate physicalDevice or any dispatchable
* object as the first parameter. We couldn't get here if it was wrong!
*/
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if (fpCreateDevice == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -2120,20 +1822,28 @@
// Advance the link info for the next element on the chain
chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
- VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
+ VkResult result =
+ fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
if (result != VK_SUCCESS) {
return result;
}
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
- my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
+ layer_data *my_instance_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
+ my_device_data->report_data = layer_debug_report_create_device(
+ my_instance_data->report_data, *pDevice);
initDeviceTable(*pDevice, fpGetDeviceProcAddr, pc_device_table_map);
uint32_t count;
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count, nullptr);
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count,
+ nullptr);
std::vector<VkQueueFamilyProperties> properties(count);
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count, &properties[0]);
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceQueueFamilyProperties(physicalDevice, &count,
+ &properties[0]);
validateDeviceCreateInfo(physicalDevice, pCreateInfo, properties);
storeCreateDeviceData(*pDevice, pCreateInfo);
@@ -2141,10 +1851,8 @@
return result;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
- VkDevice device,
- const VkAllocationCallbacks* pAllocator)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
layer_debug_report_destroy_device(device);
dispatch_key key = get_dispatch_key(device);
@@ -2152,2196 +1860,2178 @@
fprintf(stderr, "Device: %p, key: %p\n", device, key);
#endif
- get_dispatch_table(pc_device_table_map, device)->DestroyDevice(device, pAllocator);
+ get_dispatch_table(pc_device_table_map, device)
+ ->DestroyDevice(device, pAllocator);
pc_device_table_map.erase(key);
}
-bool PreGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex)
-{
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- auto queue_data = my_device_data->queueFamilyIndexMap.find(queueFamilyIndex);
+bool PreGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex,
+ uint32_t queueIndex) {
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ auto queue_data =
+ my_device_data->queueFamilyIndexMap.find(queueFamilyIndex);
if (queue_data == my_device_data->queueFamilyIndexMap.end()) {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "VkGetDeviceQueue parameter, uint32_t queueFamilyIndex %d, must have been given when the device was created.", queueFamilyIndex);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "VkGetDeviceQueue parameter, uint32_t queueFamilyIndex %d, "
+ "must have been given when the device was created.",
+ queueFamilyIndex);
return false;
}
if (queue_data->second <= queueIndex) {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "VkGetDeviceQueue parameter, uint32_t queueIndex %d, must be less than the number of queues given when the device was created.", queueIndex);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "VkGetDeviceQueue parameter, uint32_t queueIndex %d, must be "
+ "less than the number of queues given when the device was "
+ "created.",
+ queueIndex);
return false;
}
return true;
}
-bool PostGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue)
-{
+bool PostGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex,
+ uint32_t queueIndex, VkQueue *pQueue) {
-
-
- if(pQueue != nullptr)
- {
+ if (pQueue != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex,
+ uint32_t queueIndex, VkQueue *pQueue) {
PreGetDeviceQueue(device, queueFamilyIndex, queueIndex);
- get_dispatch_table(pc_device_table_map, device)->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
PostGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
}
-bool PreQueueSubmit(
- VkQueue queue,
- const VkSubmitInfo* submit)
-{
- if(submit->sType != VK_STRUCTURE_TYPE_SUBMIT_INFO) {
- log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkQueueSubmit parameter, VkStructureType pSubmits->sType, is an invalid enumerator");
+bool PreQueueSubmit(VkQueue queue, const VkSubmitInfo *submit) {
+ if (submit->sType != VK_STRUCTURE_TYPE_SUBMIT_INFO) {
+ log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkQueueSubmit parameter, VkStructureType pSubmits->sType, is "
+ "an invalid enumerator");
return false;
}
- if(submit->pCommandBuffers != nullptr)
- {
+ if (submit->pCommandBuffers != nullptr) {
}
return true;
}
-bool PostQueueSubmit(
- VkQueue queue,
- uint32_t commandBufferCount,
- VkFence fence,
- VkResult result)
-{
+bool PostQueueSubmit(VkQueue queue, uint32_t commandBufferCount, VkFence fence,
+ VkResult result) {
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkQueueSubmit parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkQueueSubmit parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(
- VkQueue queue,
- uint32_t submitCount,
- const VkSubmitInfo* pSubmits,
- VkFence fence)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueueSubmit(VkQueue queue, uint32_t submitCount,
+ const VkSubmitInfo *pSubmits, VkFence fence) {
for (uint32_t i = 0; i < submitCount; i++) {
PreQueueSubmit(queue, &pSubmits[i]);
}
- VkResult result = get_dispatch_table(pc_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
+ VkResult result = get_dispatch_table(pc_device_table_map, queue)
+ ->QueueSubmit(queue, submitCount, pSubmits, fence);
PostQueueSubmit(queue, submitCount, fence, result);
return result;
}
-bool PostQueueWaitIdle(
- VkQueue queue,
- VkResult result)
-{
+bool PostQueueWaitIdle(VkQueue queue, VkResult result) {
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkQueueWaitIdle parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkQueueWaitIdle parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(
- VkQueue queue)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, queue)->QueueWaitIdle(queue);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue) {
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, queue)->QueueWaitIdle(queue);
PostQueueWaitIdle(queue, result);
return result;
}
-bool PostDeviceWaitIdle(
- VkDevice device,
- VkResult result)
-{
+bool PostDeviceWaitIdle(VkDevice device, VkResult result) {
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkDeviceWaitIdle parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkDeviceWaitIdle parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(
- VkDevice device)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->DeviceWaitIdle(device);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkDeviceWaitIdle(VkDevice device) {
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)->DeviceWaitIdle(device);
PostDeviceWaitIdle(device, result);
return result;
}
-bool PreAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo)
-{
- if(pAllocateInfo != nullptr)
- {
- if(pAllocateInfo->sType != VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkAllocateMemory parameter, VkStructureType pAllocateInfo->sType, is an invalid enumerator");
- return false;
- }
+bool PreAllocateMemory(VkDevice device,
+ const VkMemoryAllocateInfo *pAllocateInfo) {
+ if (pAllocateInfo != nullptr) {
+ if (pAllocateInfo->sType != VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkAllocateMemory parameter, VkStructureType "
+ "pAllocateInfo->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostAllocateMemory(
- VkDevice device,
- VkDeviceMemory* pMemory,
- VkResult result)
-{
+bool PostAllocateMemory(VkDevice device, VkDeviceMemory *pMemory,
+ VkResult result) {
- if(pMemory != nullptr)
- {
+ if (pMemory != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkAllocateMemory parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkAllocateMemory parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(
- VkDevice device,
- const VkMemoryAllocateInfo* pAllocateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDeviceMemory* pMemory)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDeviceMemory *pMemory) {
PreAllocateMemory(device, pAllocateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
PostAllocateMemory(device, pMemory, result);
return result;
}
-bool PostMapMemory(
- VkDevice device,
- VkDeviceMemory mem,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData,
- VkResult result)
-{
+bool PostMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
+ VkDeviceSize size, VkMemoryMapFlags flags, void **ppData,
+ VkResult result) {
-
-
-
-
- if(ppData != nullptr)
- {
+ if (ppData != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkMapMemory parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkMapMemory parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
- VkDevice device,
- VkDeviceMemory mem,
- VkDeviceSize offset,
- VkDeviceSize size,
- VkMemoryMapFlags flags,
- void** ppData)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->MapMemory(device, mem, offset, size, flags, ppData);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
+ VkDeviceSize size, VkMemoryMapFlags flags, void **ppData) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->MapMemory(device, mem, offset, size, flags, ppData);
PostMapMemory(device, mem, offset, size, flags, ppData, result);
return result;
}
-bool PreFlushMappedMemoryRanges(
- VkDevice device,
- const VkMappedMemoryRange* pMemoryRanges)
-{
- if(pMemoryRanges != nullptr)
- {
- if(pMemoryRanges->sType != VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkFlushMappedMemoryRanges parameter, VkStructureType pMemoryRanges->sType, is an invalid enumerator");
- return false;
- }
+bool PreFlushMappedMemoryRanges(VkDevice device,
+ const VkMappedMemoryRange *pMemoryRanges) {
+ if (pMemoryRanges != nullptr) {
+ if (pMemoryRanges->sType != VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkFlushMappedMemoryRanges parameter, VkStructureType "
+ "pMemoryRanges->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- VkResult result)
-{
+bool PostFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
+ VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkFlushMappedMemoryRanges parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkFlushMappedMemoryRanges parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
+ const VkMappedMemoryRange *pMemoryRanges) {
PreFlushMappedMemoryRanges(device, pMemoryRanges);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->FlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
PostFlushMappedMemoryRanges(device, memoryRangeCount, result);
return result;
}
-bool PreInvalidateMappedMemoryRanges(
- VkDevice device,
- const VkMappedMemoryRange* pMemoryRanges)
-{
- if(pMemoryRanges != nullptr)
- {
- if(pMemoryRanges->sType != VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkInvalidateMappedMemoryRanges parameter, VkStructureType pMemoryRanges->sType, is an invalid enumerator");
- return false;
- }
+bool PreInvalidateMappedMemoryRanges(VkDevice device,
+ const VkMappedMemoryRange *pMemoryRanges) {
+ if (pMemoryRanges != nullptr) {
+ if (pMemoryRanges->sType != VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkInvalidateMappedMemoryRanges parameter, VkStructureType "
+ "pMemoryRanges->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- VkResult result)
-{
+bool PostInvalidateMappedMemoryRanges(VkDevice device,
+ uint32_t memoryRangeCount,
+ VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkInvalidateMappedMemoryRanges parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkInvalidateMappedMemoryRanges parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(
- VkDevice device,
- uint32_t memoryRangeCount,
- const VkMappedMemoryRange* pMemoryRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
+ const VkMappedMemoryRange *pMemoryRanges) {
PreInvalidateMappedMemoryRanges(device, pMemoryRanges);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->InvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->InvalidateMappedMemoryRanges(
+ device, memoryRangeCount, pMemoryRanges);
PostInvalidateMappedMemoryRanges(device, memoryRangeCount, result);
return result;
}
-bool PostGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes)
-{
+bool PostGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
+ VkDeviceSize *pCommittedMemoryInBytes) {
-
- if(pCommittedMemoryInBytes != nullptr)
- {
+ if (pCommittedMemoryInBytes != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(
- VkDevice device,
- VkDeviceMemory memory,
- VkDeviceSize* pCommittedMemoryInBytes)
-{
- get_dispatch_table(pc_device_table_map, device)->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
+ VkDeviceSize *pCommittedMemoryInBytes) {
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
PostGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
}
-bool PostBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset,
- VkResult result)
-{
+bool PostBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset, VkResult result) {
-
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkBindBufferMemory parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkBindBufferMemory parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(
- VkDevice device,
- VkBuffer buffer,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->BindBufferMemory(device, buffer, mem, memoryOffset);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->BindBufferMemory(device, buffer, mem, memoryOffset);
PostBindBufferMemory(device, buffer, mem, memoryOffset, result);
return result;
}
-bool PostBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset,
- VkResult result)
-{
+bool PostBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset, VkResult result) {
-
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkBindImageMemory parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkBindImageMemory parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(
- VkDevice device,
- VkImage image,
- VkDeviceMemory mem,
- VkDeviceSize memoryOffset)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->BindImageMemory(device, image, mem, memoryOffset);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
+ VkDeviceSize memoryOffset) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->BindImageMemory(device, image, mem, memoryOffset);
PostBindImageMemory(device, image, mem, memoryOffset, result);
return result;
}
-bool PostGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements)
-{
+bool
+PostGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
+ VkMemoryRequirements *pMemoryRequirements) {
-
- if(pMemoryRequirements != nullptr)
- {
+ if (pMemoryRequirements != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(
- VkDevice device,
- VkBuffer buffer,
- VkMemoryRequirements* pMemoryRequirements)
-{
- get_dispatch_table(pc_device_table_map, device)->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
+ VkMemoryRequirements *pMemoryRequirements) {
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
PostGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
}
-bool PostGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements)
-{
+bool PostGetImageMemoryRequirements(VkDevice device, VkImage image,
+ VkMemoryRequirements *pMemoryRequirements) {
-
- if(pMemoryRequirements != nullptr)
- {
+ if (pMemoryRequirements != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(
- VkDevice device,
- VkImage image,
- VkMemoryRequirements* pMemoryRequirements)
-{
- get_dispatch_table(pc_device_table_map, device)->GetImageMemoryRequirements(device, image, pMemoryRequirements);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetImageMemoryRequirements(VkDevice device, VkImage image,
+ VkMemoryRequirements *pMemoryRequirements) {
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetImageMemoryRequirements(device, image, pMemoryRequirements);
PostGetImageMemoryRequirements(device, image, pMemoryRequirements);
}
bool PostGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pNumRequirements,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements)
-{
+ VkDevice device, VkImage image, uint32_t *pNumRequirements,
+ VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
-
- if(pNumRequirements != nullptr)
- {
+ if (pNumRequirements != nullptr) {
}
- if(pSparseMemoryRequirements != nullptr)
- {
- if ((pSparseMemoryRequirements->formatProperties.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetImageSparseMemoryRequirements parameter, VkImageAspect pSparseMemoryRequirements->formatProperties.aspectMask, is an unrecognized enumerator");
- return false;
- }
+ if (pSparseMemoryRequirements != nullptr) {
+ if ((pSparseMemoryRequirements->formatProperties.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetImageSparseMemoryRequirements parameter, "
+ "VkImageAspect "
+ "pSparseMemoryRequirements->formatProperties.aspectMask, "
+ "is an unrecognized enumerator");
+ return false;
+ }
}
return true;
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(
- VkDevice device,
- VkImage image,
- uint32_t* pNumRequirements,
- VkSparseImageMemoryRequirements* pSparseMemoryRequirements)
-{
- get_dispatch_table(pc_device_table_map, device)->GetImageSparseMemoryRequirements(device, image, pNumRequirements, pSparseMemoryRequirements);
+ VkDevice device, VkImage image, uint32_t *pNumRequirements,
+ VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetImageSparseMemoryRequirements(device, image, pNumRequirements,
+ pSparseMemoryRequirements);
- PostGetImageSparseMemoryRequirements(device, image, pNumRequirements, pSparseMemoryRequirements);
+ PostGetImageSparseMemoryRequirements(device, image, pNumRequirements,
+ pSparseMemoryRequirements);
}
bool PostGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pNumProperties,
- VkSparseImageFormatProperties* pProperties)
-{
+ VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+ VkSampleCountFlagBits samples, VkImageUsageFlags usage,
+ VkImageTiling tiling, uint32_t *pNumProperties,
+ VkSparseImageFormatProperties *pProperties) {
- if(format < VK_FORMAT_BEGIN_RANGE ||
- format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceSparseImageFormatProperties parameter, VkFormat format, is an unrecognized enumerator");
+ if (format < VK_FORMAT_BEGIN_RANGE || format > VK_FORMAT_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceSparseImageFormatProperties parameter, "
+ "VkFormat format, is an unrecognized enumerator");
return false;
}
- if(type < VK_IMAGE_TYPE_BEGIN_RANGE ||
- type > VK_IMAGE_TYPE_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceSparseImageFormatProperties parameter, VkImageType type, is an unrecognized enumerator");
+ if (type < VK_IMAGE_TYPE_BEGIN_RANGE || type > VK_IMAGE_TYPE_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceSparseImageFormatProperties parameter, "
+ "VkImageType type, is an unrecognized enumerator");
return false;
}
-
-
- if(tiling < VK_IMAGE_TILING_BEGIN_RANGE ||
- tiling > VK_IMAGE_TILING_END_RANGE)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceSparseImageFormatProperties parameter, VkImageTiling tiling, is an unrecognized enumerator");
+ if (tiling < VK_IMAGE_TILING_BEGIN_RANGE ||
+ tiling > VK_IMAGE_TILING_END_RANGE) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceSparseImageFormatProperties parameter, "
+ "VkImageTiling tiling, is an unrecognized enumerator");
return false;
}
- if(pNumProperties != nullptr)
- {
+ if (pNumProperties != nullptr) {
}
- if(pProperties != nullptr)
- {
- if ((pProperties->aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetPhysicalDeviceSparseImageFormatProperties parameter, VkImageAspect pProperties->aspectMask, is an unrecognized enumerator");
- return false;
- }
+ if (pProperties != nullptr) {
+ if ((pProperties->aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(physicalDevice), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetPhysicalDeviceSparseImageFormatProperties parameter, "
+ "VkImageAspect pProperties->aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(
- VkPhysicalDevice physicalDevice,
- VkFormat format,
- VkImageType type,
- VkSampleCountFlagBits samples,
- VkImageUsageFlags usage,
- VkImageTiling tiling,
- uint32_t* pNumProperties,
- VkSparseImageFormatProperties* pProperties)
-{
- get_dispatch_table(pc_instance_table_map, physicalDevice)->GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pNumProperties, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceSparseImageFormatProperties(
+ VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
+ VkSampleCountFlagBits samples, VkImageUsageFlags usage,
+ VkImageTiling tiling, uint32_t *pNumProperties,
+ VkSparseImageFormatProperties *pProperties) {
+ get_dispatch_table(pc_instance_table_map, physicalDevice)
+ ->GetPhysicalDeviceSparseImageFormatProperties(
+ physicalDevice, format, type, samples, usage, tiling,
+ pNumProperties, pProperties);
- PostGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pNumProperties, pProperties);
+ PostGetPhysicalDeviceSparseImageFormatProperties(
+ physicalDevice, format, type, samples, usage, tiling, pNumProperties,
+ pProperties);
}
-bool PreQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo)
-{
- if(pBindInfo != nullptr)
- {
+bool PreQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo *pBindInfo) {
+ if (pBindInfo != nullptr) {
}
return true;
}
-bool PostQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence,
- VkResult result)
-{
+bool PostQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo *pBindInfo, VkFence fence,
+ VkResult result) {
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkQueueBindSparse parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkQueueBindSparse parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(queue), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
- VkQueue queue,
- uint32_t bindInfoCount,
- const VkBindSparseInfo* pBindInfo,
- VkFence fence)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo *pBindInfo, VkFence fence) {
PreQueueBindSparse(queue, bindInfoCount, pBindInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, queue)
+ ->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
PostQueueBindSparse(queue, bindInfoCount, pBindInfo, fence, result);
return result;
}
-bool PreCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_FENCE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateFence parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
+bool PreCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_FENCE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateFence parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateFence(
- VkDevice device,
- VkFence* pFence,
- VkResult result)
-{
+bool PostCreateFence(VkDevice device, VkFence *pFence, VkResult result) {
- if(pFence != nullptr)
- {
+ if (pFence != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateFence parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkCreateFence parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(
- VkDevice device,
- const VkFenceCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFence* pFence)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
PreCreateFence(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateFence(device, pCreateInfo, pAllocator, pFence);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateFence(device, pCreateInfo, pAllocator, pFence);
PostCreateFence(device, pFence, result);
return result;
}
-bool PreResetFences(
- VkDevice device,
- const VkFence* pFences)
-{
- if(pFences != nullptr)
- {
+bool PreResetFences(VkDevice device, const VkFence *pFences) {
+ if (pFences != nullptr) {
}
return true;
}
-bool PostResetFences(
- VkDevice device,
- uint32_t fenceCount,
- VkResult result)
-{
+bool PostResetFences(VkDevice device, uint32_t fenceCount, VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkResetFences parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkResetFences parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetFences(VkDevice device, uint32_t fenceCount,
+ const VkFence *pFences) {
PreResetFences(device, pFences);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->ResetFences(device, fenceCount, pFences);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->ResetFences(device, fenceCount, pFences);
PostResetFences(device, fenceCount, result);
return result;
}
-bool PostGetFenceStatus(
- VkDevice device,
- VkFence fence,
- VkResult result)
-{
+bool PostGetFenceStatus(VkDevice device, VkFence fence, VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkGetFenceStatus parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkGetFenceStatus parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(
- VkDevice device,
- VkFence fence)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->GetFenceStatus(device, fence);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetFenceStatus(VkDevice device, VkFence fence) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->GetFenceStatus(device, fence);
PostGetFenceStatus(device, fence, result);
return result;
}
-bool PreWaitForFences(
- VkDevice device,
- const VkFence* pFences)
-{
- if(pFences != nullptr)
- {
+bool PreWaitForFences(VkDevice device, const VkFence *pFences) {
+ if (pFences != nullptr) {
}
return true;
}
-bool PostWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- VkBool32 waitAll,
- uint64_t timeout,
- VkResult result)
-{
+bool PostWaitForFences(VkDevice device, uint32_t fenceCount, VkBool32 waitAll,
+ uint64_t timeout, VkResult result) {
-
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkWaitForFences parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkWaitForFences parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(
- VkDevice device,
- uint32_t fenceCount,
- const VkFence* pFences,
- VkBool32 waitAll,
- uint64_t timeout)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkWaitForFences(VkDevice device, uint32_t fenceCount,
+ const VkFence *pFences, VkBool32 waitAll,
+ uint64_t timeout) {
PreWaitForFences(device, pFences);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
PostWaitForFences(device, fenceCount, waitAll, timeout, result);
return result;
}
-bool PreCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSemaphore parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
+bool PreCreateSemaphore(VkDevice device,
+ const VkSemaphoreCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateSemaphore parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateSemaphore(
- VkDevice device,
- VkSemaphore* pSemaphore,
- VkResult result)
-{
+bool PostCreateSemaphore(VkDevice device, VkSemaphore *pSemaphore,
+ VkResult result) {
- if(pSemaphore != nullptr)
- {
+ if (pSemaphore != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateSemaphore parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateSemaphore parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
- VkDevice device,
- const VkSemaphoreCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSemaphore* pSemaphore)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSemaphore *pSemaphore) {
PreCreateSemaphore(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
PostCreateSemaphore(device, pSemaphore, result);
return result;
}
-bool PreCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_EVENT_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateEvent parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
+bool PreCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_EVENT_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateEvent parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateEvent(
- VkDevice device,
- VkEvent* pEvent,
- VkResult result)
-{
+bool PostCreateEvent(VkDevice device, VkEvent *pEvent, VkResult result) {
- if(pEvent != nullptr)
- {
+ if (pEvent != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateEvent parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkCreateEvent parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(
- VkDevice device,
- const VkEventCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkEvent* pEvent)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
PreCreateEvent(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
PostCreateEvent(device, pEvent, result);
return result;
}
-bool PostGetEventStatus(
- VkDevice device,
- VkEvent event,
- VkResult result)
-{
+bool PostGetEventStatus(VkDevice device, VkEvent event, VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkGetEventStatus parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkGetEventStatus parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(
- VkDevice device,
- VkEvent event)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->GetEventStatus(device, event);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetEventStatus(VkDevice device, VkEvent event) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->GetEventStatus(device, event);
PostGetEventStatus(device, event, result);
return result;
}
-bool PostSetEvent(
- VkDevice device,
- VkEvent event,
- VkResult result)
-{
+bool PostSetEvent(VkDevice device, VkEvent event, VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkSetEvent parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkSetEvent parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(
- VkDevice device,
- VkEvent event)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->SetEvent(device, event);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkSetEvent(VkDevice device, VkEvent event) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->SetEvent(device, event);
PostSetEvent(device, event, result);
return result;
}
-bool PostResetEvent(
- VkDevice device,
- VkEvent event,
- VkResult result)
-{
+bool PostResetEvent(VkDevice device, VkEvent event, VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkResetEvent parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkResetEvent parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(
- VkDevice device,
- VkEvent event)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->ResetEvent(device, event);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetEvent(VkDevice device, VkEvent event) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->ResetEvent(device, event);
PostResetEvent(device, event, result);
return result;
}
-bool PreCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateQueryPool parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->queryType < VK_QUERY_TYPE_BEGIN_RANGE ||
- pCreateInfo->queryType > VK_QUERY_TYPE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateQueryPool parameter, VkQueryType pCreateInfo->queryType, is an unrecognized enumerator");
- return false;
- }
+bool PreCreateQueryPool(VkDevice device,
+ const VkQueryPoolCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateQueryPool parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->queryType < VK_QUERY_TYPE_BEGIN_RANGE ||
+ pCreateInfo->queryType > VK_QUERY_TYPE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateQueryPool parameter, VkQueryType "
+ "pCreateInfo->queryType, is an unrecognized enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateQueryPool(
- VkDevice device,
- VkQueryPool* pQueryPool,
- VkResult result)
-{
+bool PostCreateQueryPool(VkDevice device, VkQueryPool *pQueryPool,
+ VkResult result) {
- if(pQueryPool != nullptr)
- {
+ if (pQueryPool != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateQueryPool parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateQueryPool parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(
- VkDevice device,
- const VkQueryPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkQueryPool* pQueryPool)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkQueryPool *pQueryPool) {
PreCreateQueryPool(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
PostCreateQueryPool(device, pQueryPool, result);
return result;
}
-bool PostGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags,
- VkResult result)
-{
+bool PostGetQueryPoolResults(VkDevice device, VkQueryPool queryPool,
+ uint32_t firstQuery, uint32_t queryCount,
+ size_t dataSize, void *pData, VkDeviceSize stride,
+ VkQueryResultFlags flags, VkResult result) {
-
-
-
- if(pData != nullptr)
- {
+ if (pData != nullptr) {
}
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkGetQueryPoolResults parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkGetQueryPoolResults parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(
- VkDevice device,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- size_t dataSize,
- void* pData,
- VkDeviceSize stride,
- VkQueryResultFlags flags)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool,
+ uint32_t firstQuery, uint32_t queryCount,
+ size_t dataSize, void *pData, VkDeviceSize stride,
+ VkQueryResultFlags flags) {
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetQueryPoolResults(device, queryPool, firstQuery, queryCount,
+ dataSize, pData, stride, flags);
- PostGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags, result);
+ PostGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize,
+ pData, stride, flags, result);
return result;
}
-bool PreCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateBuffer parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->sharingMode < VK_SHARING_MODE_BEGIN_RANGE ||
- pCreateInfo->sharingMode > VK_SHARING_MODE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateBuffer parameter, VkSharingMode pCreateInfo->sharingMode, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pQueueFamilyIndices != nullptr)
- {
- }
+bool PreCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateBuffer parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->sharingMode < VK_SHARING_MODE_BEGIN_RANGE ||
+ pCreateInfo->sharingMode > VK_SHARING_MODE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateBuffer parameter, VkSharingMode "
+ "pCreateInfo->sharingMode, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pQueueFamilyIndices != nullptr) {
+ }
}
return true;
}
-bool PostCreateBuffer(
- VkDevice device,
- VkBuffer* pBuffer,
- VkResult result)
-{
+bool PostCreateBuffer(VkDevice device, VkBuffer *pBuffer, VkResult result) {
- if(pBuffer != nullptr)
- {
+ if (pBuffer != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateBuffer parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkCreateBuffer parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(
- VkDevice device,
- const VkBufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBuffer* pBuffer)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
PreCreateBuffer(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
PostCreateBuffer(device, pBuffer, result);
return result;
}
-bool PreCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateBufferView parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->format < VK_FORMAT_BEGIN_RANGE ||
- pCreateInfo->format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateBufferView parameter, VkFormat pCreateInfo->format, is an unrecognized enumerator");
- return false;
- }
+bool PreCreateBufferView(VkDevice device,
+ const VkBufferViewCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateBufferView parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->format < VK_FORMAT_BEGIN_RANGE ||
+ pCreateInfo->format > VK_FORMAT_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateBufferView parameter, VkFormat "
+ "pCreateInfo->format, is an unrecognized enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateBufferView(
- VkDevice device,
- VkBufferView* pView,
- VkResult result)
-{
+bool PostCreateBufferView(VkDevice device, VkBufferView *pView,
+ VkResult result) {
- if(pView != nullptr)
- {
+ if (pView != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateBufferView parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateBufferView parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(
- VkDevice device,
- const VkBufferViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkBufferView* pView)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateBufferView(VkDevice device,
+ const VkBufferViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkBufferView *pView) {
PreCreateBufferView(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateBufferView(device, pCreateInfo, pAllocator, pView);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateBufferView(device, pCreateInfo, pAllocator, pView);
PostCreateBufferView(device, pView, result);
return result;
}
-bool PreCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImage parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->imageType < VK_IMAGE_TYPE_BEGIN_RANGE ||
- pCreateInfo->imageType > VK_IMAGE_TYPE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImage parameter, VkImageType pCreateInfo->imageType, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->format < VK_FORMAT_BEGIN_RANGE ||
- pCreateInfo->format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImage parameter, VkFormat pCreateInfo->format, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->tiling < VK_IMAGE_TILING_BEGIN_RANGE ||
- pCreateInfo->tiling > VK_IMAGE_TILING_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImage parameter, VkImageTiling pCreateInfo->tiling, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->sharingMode < VK_SHARING_MODE_BEGIN_RANGE ||
- pCreateInfo->sharingMode > VK_SHARING_MODE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImage parameter, VkSharingMode pCreateInfo->sharingMode, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pQueueFamilyIndices != nullptr)
- {
- }
+bool PreCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImage parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->imageType < VK_IMAGE_TYPE_BEGIN_RANGE ||
+ pCreateInfo->imageType > VK_IMAGE_TYPE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImage parameter, VkImageType "
+ "pCreateInfo->imageType, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->format < VK_FORMAT_BEGIN_RANGE ||
+ pCreateInfo->format > VK_FORMAT_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImage parameter, VkFormat pCreateInfo->format, is "
+ "an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->tiling < VK_IMAGE_TILING_BEGIN_RANGE ||
+ pCreateInfo->tiling > VK_IMAGE_TILING_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImage parameter, VkImageTiling "
+ "pCreateInfo->tiling, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->sharingMode < VK_SHARING_MODE_BEGIN_RANGE ||
+ pCreateInfo->sharingMode > VK_SHARING_MODE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImage parameter, VkSharingMode "
+ "pCreateInfo->sharingMode, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pQueueFamilyIndices != nullptr) {
+ }
}
return true;
}
-bool PostCreateImage(
- VkDevice device,
- VkImage* pImage,
- VkResult result)
-{
+bool PostCreateImage(VkDevice device, VkImage *pImage, VkResult result) {
- if(pImage != nullptr)
- {
+ if (pImage != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateImage parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkCreateImage parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(
- VkDevice device,
- const VkImageCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImage* pImage)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
PreCreateImage(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateImage(device, pCreateInfo, pAllocator, pImage);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateImage(device, pCreateInfo, pAllocator, pImage);
PostCreateImage(device, pImage, result);
return result;
}
-bool PreGetImageSubresourceLayout(
- VkDevice device,
- const VkImageSubresource* pSubresource)
-{
- if(pSubresource != nullptr)
- {
- if ((pSubresource->aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkGetImageSubresourceLayout parameter, VkImageAspect pSubresource->aspectMask, is an unrecognized enumerator");
- return false;
- }
+bool PreGetImageSubresourceLayout(VkDevice device,
+ const VkImageSubresource *pSubresource) {
+ if (pSubresource != nullptr) {
+ if ((pSubresource->aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkGetImageSubresourceLayout parameter, VkImageAspect "
+ "pSubresource->aspectMask, is an unrecognized enumerator");
+ return false;
+ }
}
return true;
}
-bool PostGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- VkSubresourceLayout* pLayout)
-{
+bool PostGetImageSubresourceLayout(VkDevice device, VkImage image,
+ VkSubresourceLayout *pLayout) {
-
- if(pLayout != nullptr)
- {
+ if (pLayout != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
- VkDevice device,
- VkImage image,
- const VkImageSubresource* pSubresource,
- VkSubresourceLayout* pLayout)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetImageSubresourceLayout(VkDevice device, VkImage image,
+ const VkImageSubresource *pSubresource,
+ VkSubresourceLayout *pLayout) {
PreGetImageSubresourceLayout(device, pSubresource);
- get_dispatch_table(pc_device_table_map, device)->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
PostGetImageSubresourceLayout(device, image, pLayout);
}
-bool PreCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImageView parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->viewType < VK_IMAGE_VIEW_TYPE_BEGIN_RANGE ||
- pCreateInfo->viewType > VK_IMAGE_VIEW_TYPE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImageView parameter, VkImageViewType pCreateInfo->viewType, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->format < VK_FORMAT_BEGIN_RANGE ||
- pCreateInfo->format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImageView parameter, VkFormat pCreateInfo->format, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->components.r < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
- pCreateInfo->components.r > VK_COMPONENT_SWIZZLE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImageView parameter, VkComponentSwizzle pCreateInfo->components.r, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->components.g < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
- pCreateInfo->components.g > VK_COMPONENT_SWIZZLE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImageView parameter, VkComponentSwizzle pCreateInfo->components.g, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->components.b < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
- pCreateInfo->components.b > VK_COMPONENT_SWIZZLE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImageView parameter, VkComponentSwizzle pCreateInfo->components.b, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->components.a < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
- pCreateInfo->components.a > VK_COMPONENT_SWIZZLE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateImageView parameter, VkComponentSwizzle pCreateInfo->components.a, is an unrecognized enumerator");
- return false;
- }
+bool PreCreateImageView(VkDevice device,
+ const VkImageViewCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImageView parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->viewType < VK_IMAGE_VIEW_TYPE_BEGIN_RANGE ||
+ pCreateInfo->viewType > VK_IMAGE_VIEW_TYPE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImageView parameter, VkImageViewType "
+ "pCreateInfo->viewType, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->format < VK_FORMAT_BEGIN_RANGE ||
+ pCreateInfo->format > VK_FORMAT_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImageView parameter, VkFormat "
+ "pCreateInfo->format, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->components.r < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
+ pCreateInfo->components.r > VK_COMPONENT_SWIZZLE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImageView parameter, VkComponentSwizzle "
+ "pCreateInfo->components.r, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->components.g < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
+ pCreateInfo->components.g > VK_COMPONENT_SWIZZLE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImageView parameter, VkComponentSwizzle "
+ "pCreateInfo->components.g, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->components.b < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
+ pCreateInfo->components.b > VK_COMPONENT_SWIZZLE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImageView parameter, VkComponentSwizzle "
+ "pCreateInfo->components.b, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->components.a < VK_COMPONENT_SWIZZLE_BEGIN_RANGE ||
+ pCreateInfo->components.a > VK_COMPONENT_SWIZZLE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateImageView parameter, VkComponentSwizzle "
+ "pCreateInfo->components.a, is an unrecognized enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateImageView(
- VkDevice device,
- VkImageView* pView,
- VkResult result)
-{
+bool PostCreateImageView(VkDevice device, VkImageView *pView, VkResult result) {
- if(pView != nullptr)
- {
+ if (pView != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateImageView parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateImageView parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(
- VkDevice device,
- const VkImageViewCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkImageView* pView)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkImageView *pView) {
PreCreateImageView(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateImageView(device, pCreateInfo, pAllocator, pView);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateImageView(device, pCreateInfo, pAllocator, pView);
PostCreateImageView(device, pView, result);
return result;
}
-bool PreCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo)
-{
- if(pCreateInfo) {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO) {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateShaderModule parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
+bool PreCreateShaderModule(VkDevice device,
+ const VkShaderModuleCreateInfo *pCreateInfo) {
+ if (pCreateInfo) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateShaderModule parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
return false;
}
- if(!pCreateInfo->pCode) {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateShaderModule paramter, void* pCreateInfo->pCode, is null");
+ if (!pCreateInfo->pCode) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+                    "vkCreateShaderModule parameter, void* pCreateInfo->pCode, "
+ "is null");
return false;
}
} else {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateShaderModule parameter, VkShaderModuleCreateInfo pCreateInfo, is null");
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateShaderModule parameter, VkShaderModuleCreateInfo "
+ "pCreateInfo, is null");
return false;
}
return true;
}
-bool PostCreateShaderModule(
- VkDevice device,
- VkShaderModule* pShaderModule,
- VkResult result)
-{
- if(result < VK_SUCCESS) {
- std::string reason = "vkCreateShaderModule parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+bool PostCreateShaderModule(VkDevice device, VkShaderModule *pShaderModule,
+ VkResult result) {
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateShaderModule parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(
- VkDevice device,
- const VkShaderModuleCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkShaderModule* pShaderModule)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateShaderModule(VkDevice device,
+ const VkShaderModuleCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkShaderModule *pShaderModule) {
PreCreateShaderModule(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->CreateShaderModule(device, pCreateInfo, pAllocator,
+ pShaderModule);
PostCreateShaderModule(device, pShaderModule, result);
return result;
}
-bool PreCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreatePipelineCache parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->pInitialData != nullptr)
- {
- }
+bool PreCreatePipelineCache(VkDevice device,
+ const VkPipelineCacheCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreatePipelineCache parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->pInitialData != nullptr) {
+ }
}
return true;
}
-bool PostCreatePipelineCache(
- VkDevice device,
- VkPipelineCache* pPipelineCache,
- VkResult result)
-{
+bool PostCreatePipelineCache(VkDevice device, VkPipelineCache *pPipelineCache,
+ VkResult result) {
- if(pPipelineCache != nullptr)
- {
+ if (pPipelineCache != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreatePipelineCache parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreatePipelineCache parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(
- VkDevice device,
- const VkPipelineCacheCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineCache* pPipelineCache)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreatePipelineCache(VkDevice device,
+ const VkPipelineCacheCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipelineCache *pPipelineCache) {
PreCreatePipelineCache(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->CreatePipelineCache(device, pCreateInfo, pAllocator,
+ pPipelineCache);
PostCreatePipelineCache(device, pPipelineCache, result);
return result;
}
-bool PostGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData,
- VkResult result)
-{
+bool PostGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache,
+ size_t *pDataSize, void *pData, VkResult result) {
-
- if(pDataSize != nullptr)
- {
+ if (pDataSize != nullptr) {
}
- if(pData != nullptr)
- {
+ if (pData != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkGetPipelineCacheData parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkGetPipelineCacheData parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(
- VkDevice device,
- VkPipelineCache pipelineCache,
- size_t* pDataSize,
- void* pData)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache,
+ size_t *pDataSize, void *pData) {
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
PostGetPipelineCacheData(device, pipelineCache, pDataSize, pData, result);
return result;
}
-bool PreMergePipelineCaches(
- VkDevice device,
- const VkPipelineCache* pSrcCaches)
-{
- if(pSrcCaches != nullptr)
- {
+bool PreMergePipelineCaches(VkDevice device,
+ const VkPipelineCache *pSrcCaches) {
+ if (pSrcCaches != nullptr) {
}
return true;
}
-bool PostMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- VkResult result)
-{
+bool PostMergePipelineCaches(VkDevice device, VkPipelineCache dstCache,
+ uint32_t srcCacheCount, VkResult result) {
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkMergePipelineCaches parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkMergePipelineCaches parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(
- VkDevice device,
- VkPipelineCache dstCache,
- uint32_t srcCacheCount,
- const VkPipelineCache* pSrcCaches)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache,
+ uint32_t srcCacheCount,
+ const VkPipelineCache *pSrcCaches) {
PreMergePipelineCaches(device, pSrcCaches);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->MergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
PostMergePipelineCaches(device, dstCache, srcCacheCount, result);
return result;
}
-bool PreCreateGraphicsPipelines(
- VkDevice device,
- const VkGraphicsPipelineCreateInfo* pCreateInfos)
-{
- if(pCreateInfos != nullptr)
- {
- if(pCreateInfos->sType != VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->pStages != nullptr)
- {
- if(pCreateInfos->pStages->sType != VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pStages->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->pStages->pSpecializationInfo != nullptr)
- {
- if(pCreateInfos->pStages->pSpecializationInfo->pMapEntries != nullptr)
- {
- }
- if(pCreateInfos->pStages->pSpecializationInfo->pData != nullptr)
- {
- }
- }
- }
- if(pCreateInfos->pVertexInputState != nullptr)
- {
- if(pCreateInfos->pVertexInputState->sType != VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pVertexInputState->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->pVertexInputState->pVertexBindingDescriptions != nullptr)
- {
- if(pCreateInfos->pVertexInputState->pVertexBindingDescriptions->inputRate < VK_VERTEX_INPUT_RATE_BEGIN_RANGE ||
- pCreateInfos->pVertexInputState->pVertexBindingDescriptions->inputRate > VK_VERTEX_INPUT_RATE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkVertexInputRate pCreateInfos->pVertexInputState->pVertexBindingDescriptions->inputRate, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfos->pVertexInputState->pVertexAttributeDescriptions != nullptr)
- {
- if(pCreateInfos->pVertexInputState->pVertexAttributeDescriptions->format < VK_FORMAT_BEGIN_RANGE ||
- pCreateInfos->pVertexInputState->pVertexAttributeDescriptions->format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkFormat pCreateInfos->pVertexInputState->pVertexAttributeDescriptions->format, is an unrecognized enumerator");
- return false;
- }
- }
- }
- if(pCreateInfos->pInputAssemblyState != nullptr)
- {
- if(pCreateInfos->pInputAssemblyState->sType != VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pInputAssemblyState->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->pInputAssemblyState->topology < VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE ||
- pCreateInfos->pInputAssemblyState->topology > VK_PRIMITIVE_TOPOLOGY_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkPrimitiveTopology pCreateInfos->pInputAssemblyState->topology, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfos->pTessellationState != nullptr)
- {
- if(pCreateInfos->pTessellationState->sType != VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pTessellationState->sType, is an invalid enumerator");
- return false;
- }
- }
- if(pCreateInfos->pViewportState != nullptr)
- {
- if(pCreateInfos->pViewportState->sType != VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pViewportState->sType, is an invalid enumerator");
- return false;
- }
- }
- if(pCreateInfos->pRasterizationState != nullptr)
- {
- if(pCreateInfos->pRasterizationState->sType != VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pRasterizationState->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->pRasterizationState->polygonMode < VK_POLYGON_MODE_BEGIN_RANGE ||
- pCreateInfos->pRasterizationState->polygonMode > VK_POLYGON_MODE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkPolygonMode pCreateInfos->pRasterizationState->polygonMode, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pRasterizationState->cullMode & ~VK_CULL_MODE_FRONT_AND_BACK)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkCullMode pCreateInfos->pRasterizationState->cullMode, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pRasterizationState->frontFace < VK_FRONT_FACE_BEGIN_RANGE ||
- pCreateInfos->pRasterizationState->frontFace > VK_FRONT_FACE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkFrontFace pCreateInfos->pRasterizationState->frontFace, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfos->pMultisampleState != nullptr)
- {
- if(pCreateInfos->pMultisampleState->sType != VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pMultisampleState->sType, is an invalid enumerator");
- return false;
- }
- }
- if(pCreateInfos->pDepthStencilState != nullptr)
- {
- if(pCreateInfos->pDepthStencilState->sType != VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pDepthStencilState->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->depthCompareOp < VK_COMPARE_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->depthCompareOp > VK_COMPARE_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkCompareOp pCreateInfos->pDepthStencilState->depthCompareOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->front.failOp < VK_STENCIL_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->front.failOp > VK_STENCIL_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStencilOp pCreateInfos->pDepthStencilState->front.failOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->front.passOp < VK_STENCIL_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->front.passOp > VK_STENCIL_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStencilOp pCreateInfos->pDepthStencilState->front.passOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->front.depthFailOp < VK_STENCIL_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->front.depthFailOp > VK_STENCIL_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStencilOp pCreateInfos->pDepthStencilState->front.depthFailOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->front.compareOp < VK_COMPARE_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->front.compareOp > VK_COMPARE_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkCompareOp pCreateInfos->pDepthStencilState->front.compareOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->back.failOp < VK_STENCIL_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->back.failOp > VK_STENCIL_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStencilOp pCreateInfos->pDepthStencilState->back.failOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->back.passOp < VK_STENCIL_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->back.passOp > VK_STENCIL_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStencilOp pCreateInfos->pDepthStencilState->back.passOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->back.depthFailOp < VK_STENCIL_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->back.depthFailOp > VK_STENCIL_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStencilOp pCreateInfos->pDepthStencilState->back.depthFailOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pDepthStencilState->back.compareOp < VK_COMPARE_OP_BEGIN_RANGE ||
- pCreateInfos->pDepthStencilState->back.compareOp > VK_COMPARE_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkCompareOp pCreateInfos->pDepthStencilState->back.compareOp, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfos->pColorBlendState != nullptr)
- {
- if(pCreateInfos->pColorBlendState->sType != VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkStructureType pCreateInfos->pColorBlendState->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->pColorBlendState->logicOpEnable == VK_TRUE &&
- pCreateInfos->pColorBlendState->logicOp < VK_LOGIC_OP_BEGIN_RANGE ||
- pCreateInfos->pColorBlendState->logicOp > VK_LOGIC_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkLogicOp pCreateInfos->pColorBlendState->logicOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pColorBlendState->pAttachments != nullptr && pCreateInfos->pColorBlendState->pAttachments->blendEnable == VK_TRUE)
- {
- if(pCreateInfos->pColorBlendState->pAttachments->srcColorBlendFactor < VK_BLEND_FACTOR_BEGIN_RANGE ||
- pCreateInfos->pColorBlendState->pAttachments->srcColorBlendFactor > VK_BLEND_FACTOR_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkBlendFactor pCreateInfos->pColorBlendState->pAttachments->srcColorBlendFactor, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pColorBlendState->pAttachments->dstColorBlendFactor < VK_BLEND_FACTOR_BEGIN_RANGE ||
- pCreateInfos->pColorBlendState->pAttachments->dstColorBlendFactor > VK_BLEND_FACTOR_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkBlendFactor pCreateInfos->pColorBlendState->pAttachments->dstColorBlendFactor, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pColorBlendState->pAttachments->colorBlendOp < VK_BLEND_OP_BEGIN_RANGE ||
- pCreateInfos->pColorBlendState->pAttachments->colorBlendOp > VK_BLEND_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkBlendOp pCreateInfos->pColorBlendState->pAttachments->colorBlendOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pColorBlendState->pAttachments->srcAlphaBlendFactor < VK_BLEND_FACTOR_BEGIN_RANGE ||
- pCreateInfos->pColorBlendState->pAttachments->srcAlphaBlendFactor > VK_BLEND_FACTOR_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkBlendFactor pCreateInfos->pColorBlendState->pAttachments->srcAlphaBlendFactor, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pColorBlendState->pAttachments->dstAlphaBlendFactor < VK_BLEND_FACTOR_BEGIN_RANGE ||
- pCreateInfos->pColorBlendState->pAttachments->dstAlphaBlendFactor > VK_BLEND_FACTOR_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkBlendFactor pCreateInfos->pColorBlendState->pAttachments->dstAlphaBlendFactor, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfos->pColorBlendState->pAttachments->alphaBlendOp < VK_BLEND_OP_BEGIN_RANGE ||
- pCreateInfos->pColorBlendState->pAttachments->alphaBlendOp > VK_BLEND_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkBlendOp pCreateInfos->pColorBlendState->pAttachments->alphaBlendOp, is an unrecognized enumerator");
- return false;
- }
- }
- }
- if(pCreateInfos->renderPass == VK_NULL_HANDLE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateGraphicsPipelines parameter, VkRenderPass pCreateInfos->renderPass, is null pointer");
- }
+bool
+PreCreateGraphicsPipelines(VkDevice device,
+ const VkGraphicsPipelineCreateInfo *pCreateInfos) {
+ if (pCreateInfos != nullptr) {
+ if (pCreateInfos->sType !=
+ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkStructureType "
+ "pCreateInfos->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->pStages != nullptr) {
+ if (pCreateInfos->pStages->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
+ log_msg(
+ mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkStructureType "
+ "pCreateInfos->pStages->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->pStages->pSpecializationInfo != nullptr) {
+ if (pCreateInfos->pStages->pSpecializationInfo->pMapEntries !=
+ nullptr) {
+ }
+ if (pCreateInfos->pStages->pSpecializationInfo->pData !=
+ nullptr) {
+ }
+ }
+ }
+ if (pCreateInfos->pVertexInputState != nullptr) {
+ if (pCreateInfos->pVertexInputState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pVertexInputState->sType, "
+ "is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->pVertexInputState->pVertexBindingDescriptions !=
+ nullptr) {
+ if (pCreateInfos->pVertexInputState->pVertexBindingDescriptions
+ ->inputRate < VK_VERTEX_INPUT_RATE_BEGIN_RANGE ||
+ pCreateInfos->pVertexInputState->pVertexBindingDescriptions
+ ->inputRate > VK_VERTEX_INPUT_RATE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines "
+ "parameter, VkVertexInputRate "
+ "pCreateInfos->pVertexInputState->"
+ "pVertexBindingDescriptions->"
+ "inputRate, is an unrecognized "
+ "enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfos->pVertexInputState->pVertexAttributeDescriptions !=
+ nullptr) {
+ if (pCreateInfos->pVertexInputState
+ ->pVertexAttributeDescriptions->format <
+ VK_FORMAT_BEGIN_RANGE ||
+ pCreateInfos->pVertexInputState
+ ->pVertexAttributeDescriptions->format >
+ VK_FORMAT_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkFormat "
+ "pCreateInfos->pVertexInputState->"
+ "pVertexAttributeDescriptions->format, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ }
+ }
+ if (pCreateInfos->pInputAssemblyState != nullptr) {
+ if (pCreateInfos->pInputAssemblyState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pInputAssemblyState->"
+ "sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->pInputAssemblyState->topology <
+ VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE ||
+ pCreateInfos->pInputAssemblyState->topology >
+ VK_PRIMITIVE_TOPOLOGY_END_RANGE) {
+ log_msg(
+ mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkPrimitiveTopology "
+ "pCreateInfos->pInputAssemblyState->topology, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfos->pTessellationState != nullptr) {
+ if (pCreateInfos->pTessellationState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pTessellationState->sType,"
+ " is an invalid enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfos->pViewportState != nullptr) {
+ if (pCreateInfos->pViewportState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pViewportState->sType, is "
+ "an invalid enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfos->pRasterizationState != nullptr) {
+ if (pCreateInfos->pRasterizationState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pRasterizationState->"
+ "sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->pRasterizationState->polygonMode <
+ VK_POLYGON_MODE_BEGIN_RANGE ||
+ pCreateInfos->pRasterizationState->polygonMode >
+ VK_POLYGON_MODE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkPolygonMode "
+ "pCreateInfos->pRasterizationState->polygonMode, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pRasterizationState->cullMode &
+ ~VK_CULL_MODE_FRONT_AND_BACK) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkCullMode "
+ "pCreateInfos->pRasterizationState->cullMode, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pRasterizationState->frontFace <
+ VK_FRONT_FACE_BEGIN_RANGE ||
+ pCreateInfos->pRasterizationState->frontFace >
+ VK_FRONT_FACE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkFrontFace "
+ "pCreateInfos->pRasterizationState->frontFace, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfos->pMultisampleState != nullptr) {
+ if (pCreateInfos->pMultisampleState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pMultisampleState->sType, "
+ "is an invalid enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfos->pDepthStencilState != nullptr) {
+ if (pCreateInfos->pDepthStencilState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pDepthStencilState->sType,"
+ " is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->depthCompareOp <
+ VK_COMPARE_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->depthCompareOp >
+ VK_COMPARE_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkCompareOp "
+ "pCreateInfos->pDepthStencilState->depthCompareOp, is "
+ "an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->front.failOp <
+ VK_STENCIL_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->front.failOp >
+ VK_STENCIL_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStencilOp "
+ "pCreateInfos->pDepthStencilState->front."
+ "failOp, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->front.passOp <
+ VK_STENCIL_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->front.passOp >
+ VK_STENCIL_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStencilOp "
+ "pCreateInfos->pDepthStencilState->front."
+ "passOp, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->front.depthFailOp <
+ VK_STENCIL_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->front.depthFailOp >
+ VK_STENCIL_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkStencilOp "
+ "pCreateInfos->pDepthStencilState->front.depthFailOp, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->front.compareOp <
+ VK_COMPARE_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->front.compareOp >
+ VK_COMPARE_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkCompareOp "
+ "pCreateInfos->pDepthStencilState->front.compareOp, is "
+ "an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->back.failOp <
+ VK_STENCIL_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->back.failOp >
+ VK_STENCIL_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStencilOp "
+ "pCreateInfos->pDepthStencilState->back."
+ "failOp, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->back.passOp <
+ VK_STENCIL_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->back.passOp >
+ VK_STENCIL_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStencilOp "
+ "pCreateInfos->pDepthStencilState->back."
+ "passOp, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->back.depthFailOp <
+ VK_STENCIL_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->back.depthFailOp >
+ VK_STENCIL_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkStencilOp "
+ "pCreateInfos->pDepthStencilState->back.depthFailOp, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pDepthStencilState->back.compareOp <
+ VK_COMPARE_OP_BEGIN_RANGE ||
+ pCreateInfos->pDepthStencilState->back.compareOp >
+ VK_COMPARE_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkCompareOp "
+ "pCreateInfos->pDepthStencilState->back.compareOp, is "
+ "an unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfos->pColorBlendState != nullptr) {
+ if (pCreateInfos->pColorBlendState->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkStructureType "
+ "pCreateInfos->pColorBlendState->sType, "
+ "is an invalid enumerator");
+ return false;
+ }
+            if (pCreateInfos->pColorBlendState->logicOpEnable == VK_TRUE &&
+                (pCreateInfos->pColorBlendState->logicOp <
+                     VK_LOGIC_OP_BEGIN_RANGE ||
+                 pCreateInfos->pColorBlendState->logicOp >
+                     VK_LOGIC_OP_END_RANGE)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines parameter, "
+ "VkLogicOp "
+ "pCreateInfos->pColorBlendState->logicOp,"
+ " is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pColorBlendState->pAttachments != nullptr &&
+ pCreateInfos->pColorBlendState->pAttachments->blendEnable ==
+ VK_TRUE) {
+ if (pCreateInfos->pColorBlendState->pAttachments
+ ->srcColorBlendFactor <
+ VK_BLEND_FACTOR_BEGIN_RANGE ||
+ pCreateInfos->pColorBlendState->pAttachments
+ ->srcColorBlendFactor > VK_BLEND_FACTOR_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines "
+ "parameter, VkBlendFactor "
+ "pCreateInfos->pColorBlendState->"
+ "pAttachments->srcColorBlendFactor, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pColorBlendState->pAttachments
+ ->dstColorBlendFactor <
+ VK_BLEND_FACTOR_BEGIN_RANGE ||
+ pCreateInfos->pColorBlendState->pAttachments
+ ->dstColorBlendFactor > VK_BLEND_FACTOR_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines "
+ "parameter, VkBlendFactor "
+ "pCreateInfos->pColorBlendState->"
+ "pAttachments->dstColorBlendFactor, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pColorBlendState->pAttachments->colorBlendOp <
+ VK_BLEND_OP_BEGIN_RANGE ||
+ pCreateInfos->pColorBlendState->pAttachments->colorBlendOp >
+ VK_BLEND_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkBlendOp "
+ "pCreateInfos->pColorBlendState->pAttachments->"
+ "colorBlendOp, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pColorBlendState->pAttachments
+ ->srcAlphaBlendFactor <
+ VK_BLEND_FACTOR_BEGIN_RANGE ||
+ pCreateInfos->pColorBlendState->pAttachments
+ ->srcAlphaBlendFactor > VK_BLEND_FACTOR_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines "
+ "parameter, VkBlendFactor "
+ "pCreateInfos->pColorBlendState->"
+ "pAttachments->srcAlphaBlendFactor, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pColorBlendState->pAttachments
+ ->dstAlphaBlendFactor <
+ VK_BLEND_FACTOR_BEGIN_RANGE ||
+ pCreateInfos->pColorBlendState->pAttachments
+ ->dstAlphaBlendFactor > VK_BLEND_FACTOR_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateGraphicsPipelines "
+ "parameter, VkBlendFactor "
+ "pCreateInfos->pColorBlendState->"
+ "pAttachments->dstAlphaBlendFactor, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfos->pColorBlendState->pAttachments->alphaBlendOp <
+ VK_BLEND_OP_BEGIN_RANGE ||
+ pCreateInfos->pColorBlendState->pAttachments->alphaBlendOp >
+ VK_BLEND_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkBlendOp "
+ "pCreateInfos->pColorBlendState->pAttachments->"
+ "alphaBlendOp, is an unrecognized enumerator");
+ return false;
+ }
+ }
+ }
+ if (pCreateInfos->renderPass == VK_NULL_HANDLE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateGraphicsPipelines parameter, VkRenderPass "
+                    "pCreateInfos->renderPass, is a null handle");
+ }
}
return true;
}
-bool PostCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t count,
- VkPipeline* pPipelines,
- VkResult result)
-{
+bool PostCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache,
+ uint32_t count, VkPipeline *pPipelines,
+ VkResult result) {
-
-
- if(pPipelines != nullptr)
- {
+ if (pPipelines != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateGraphicsPipelines parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateGraphicsPipelines parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t count,
- const VkGraphicsPipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache,
+ uint32_t count,
+ const VkGraphicsPipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipeline *pPipelines) {
PreCreateGraphicsPipelines(device, pCreateInfos);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateGraphicsPipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateGraphicsPipelines(device, pipelineCache, count,
+ pCreateInfos, pAllocator, pPipelines);
- PostCreateGraphicsPipelines(device, pipelineCache, count, pPipelines, result);
+ PostCreateGraphicsPipelines(device, pipelineCache, count, pPipelines,
+ result);
return result;
}
-bool PreCreateComputePipelines(
- VkDevice device,
- const VkComputePipelineCreateInfo* pCreateInfos)
-{
- if(pCreateInfos != nullptr)
- {
- if(pCreateInfos->sType != VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateComputePipelines parameter, VkStructureType pCreateInfos->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->stage.sType != VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateComputePipelines parameter, VkStructureType pCreateInfos->cs.sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfos->stage.pSpecializationInfo != nullptr)
- {
- if(pCreateInfos->stage.pSpecializationInfo->pMapEntries != nullptr)
- {
- }
- if(pCreateInfos->stage.pSpecializationInfo->pData != nullptr)
- {
- }
- }
+bool
+PreCreateComputePipelines(VkDevice device,
+ const VkComputePipelineCreateInfo *pCreateInfos) {
+ if (pCreateInfos != nullptr) {
+ if (pCreateInfos->sType !=
+ VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateComputePipelines parameter, VkStructureType "
+ "pCreateInfos->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->stage.sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateComputePipelines parameter, VkStructureType "
+                    "pCreateInfos->stage.sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfos->stage.pSpecializationInfo != nullptr) {
+ if (pCreateInfos->stage.pSpecializationInfo->pMapEntries !=
+ nullptr) {
+ }
+ if (pCreateInfos->stage.pSpecializationInfo->pData != nullptr) {
+ }
+ }
}
return true;
}
-bool PostCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t count,
- VkPipeline* pPipelines,
- VkResult result)
-{
+bool PostCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache,
+ uint32_t count, VkPipeline *pPipelines,
+ VkResult result) {
-
-
- if(pPipelines != nullptr)
- {
+ if (pPipelines != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateComputePipelines parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateComputePipelines parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(
- VkDevice device,
- VkPipelineCache pipelineCache,
- uint32_t count,
- const VkComputePipelineCreateInfo* pCreateInfos,
- const VkAllocationCallbacks* pAllocator,
- VkPipeline* pPipelines)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache,
+ uint32_t count,
+ const VkComputePipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipeline *pPipelines) {
PreCreateComputePipelines(device, pCreateInfos);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateComputePipelines(device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)->CreateComputePipelines(
+ device, pipelineCache, count, pCreateInfos, pAllocator, pPipelines);
- PostCreateComputePipelines(device, pipelineCache, count, pPipelines, result);
+ PostCreateComputePipelines(device, pipelineCache, count, pPipelines,
+ result);
return result;
}
-bool PreCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreatePipelineLayout parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->pSetLayouts != nullptr)
- {
- }
- if(pCreateInfo->pPushConstantRanges != nullptr)
- {
- }
+bool PreCreatePipelineLayout(VkDevice device,
+ const VkPipelineLayoutCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreatePipelineLayout parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->pSetLayouts != nullptr) {
+ }
+ if (pCreateInfo->pPushConstantRanges != nullptr) {
+ }
}
return true;
}
-bool PostCreatePipelineLayout(
- VkDevice device,
- VkPipelineLayout* pPipelineLayout,
- VkResult result)
-{
+bool PostCreatePipelineLayout(VkDevice device,
+ VkPipelineLayout *pPipelineLayout,
+ VkResult result) {
- if(pPipelineLayout != nullptr)
- {
+ if (pPipelineLayout != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreatePipelineLayout parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreatePipelineLayout parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(
- VkDevice device,
- const VkPipelineLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkPipelineLayout* pPipelineLayout)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreatePipelineLayout(VkDevice device,
+ const VkPipelineLayoutCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkPipelineLayout *pPipelineLayout) {
PreCreatePipelineLayout(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->CreatePipelineLayout(device, pCreateInfo,
+ pAllocator, pPipelineLayout);
PostCreatePipelineLayout(device, pPipelineLayout, result);
return result;
}
-bool PreCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->magFilter < VK_FILTER_BEGIN_RANGE ||
- pCreateInfo->magFilter > VK_FILTER_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkFilter pCreateInfo->magFilter, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->minFilter < VK_FILTER_BEGIN_RANGE ||
- pCreateInfo->minFilter > VK_FILTER_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkFilter pCreateInfo->minFilter, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->mipmapMode < VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE ||
- pCreateInfo->mipmapMode > VK_SAMPLER_MIPMAP_MODE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkSamplerMipmapMode pCreateInfo->mipmapMode, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->addressModeU < VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE ||
- pCreateInfo->addressModeU > VK_SAMPLER_ADDRESS_MODE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkTexAddress pCreateInfo->addressModeU, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->addressModeV < VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE ||
- pCreateInfo->addressModeV > VK_SAMPLER_ADDRESS_MODE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkTexAddress pCreateInfo->addressModeV, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->addressModeW < VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE ||
- pCreateInfo->addressModeW > VK_SAMPLER_ADDRESS_MODE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkTexAddress pCreateInfo->addressModeW, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->compareEnable)
- {
- if(pCreateInfo->compareOp < VK_COMPARE_OP_BEGIN_RANGE ||
- pCreateInfo->compareOp > VK_COMPARE_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkCompareOp pCreateInfo->compareOp, is an unrecognized enumerator");
+bool PreCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateSampler parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->magFilter < VK_FILTER_BEGIN_RANGE ||
+ pCreateInfo->magFilter > VK_FILTER_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateSampler parameter, VkFilter "
+ "pCreateInfo->magFilter, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->minFilter < VK_FILTER_BEGIN_RANGE ||
+ pCreateInfo->minFilter > VK_FILTER_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateSampler parameter, VkFilter "
+ "pCreateInfo->minFilter, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->mipmapMode < VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE ||
+ pCreateInfo->mipmapMode > VK_SAMPLER_MIPMAP_MODE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateSampler parameter, VkSamplerMipmapMode "
+ "pCreateInfo->mipmapMode, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->addressModeU < VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE ||
+ pCreateInfo->addressModeU > VK_SAMPLER_ADDRESS_MODE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+                    "vkCreateSampler parameter, VkSamplerAddressMode "
+ "pCreateInfo->addressModeU, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->addressModeV < VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE ||
+ pCreateInfo->addressModeV > VK_SAMPLER_ADDRESS_MODE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+                    "vkCreateSampler parameter, VkSamplerAddressMode "
+ "pCreateInfo->addressModeV, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->addressModeW < VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE ||
+ pCreateInfo->addressModeW > VK_SAMPLER_ADDRESS_MODE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+                    "vkCreateSampler parameter, VkSamplerAddressMode "
+ "pCreateInfo->addressModeW, is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->compareEnable) {
+ if (pCreateInfo->compareOp < VK_COMPARE_OP_BEGIN_RANGE ||
+ pCreateInfo->compareOp > VK_COMPARE_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateSampler parameter, VkCompareOp "
+ "pCreateInfo->compareOp, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfo->borderColor < VK_BORDER_COLOR_BEGIN_RANGE ||
+ pCreateInfo->borderColor > VK_BORDER_COLOR_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateSampler parameter, VkBorderColor "
+ "pCreateInfo->borderColor, is an unrecognized enumerator");
return false;
}
}
- if(pCreateInfo->borderColor < VK_BORDER_COLOR_BEGIN_RANGE ||
- pCreateInfo->borderColor > VK_BORDER_COLOR_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateSampler parameter, VkBorderColor pCreateInfo->borderColor, is an unrecognized enumerator");
- return false;
- }
- }
return true;
}
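
PreCreateSampler leans on the *_BEGIN_RANGE / *_END_RANGE sentinels that this generation of the Vulkan headers defines for every enum, and repeats the same two comparisons for magFilter, minFilter, mipmapMode and the three address modes. That shared test is just a bounds check; a sketch of it factored out (in_enum_range is illustrative, not a layer helper):

    // Sketch: the range test repeated by the enum checks above.
    template <typename T> static bool in_enum_range(T value, T first, T last) {
        return value >= first && value <= last;
    }
    // e.g. in_enum_range(pCreateInfo->magFilter,
    //                    VK_FILTER_BEGIN_RANGE, VK_FILTER_END_RANGE)
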
-bool PostCreateSampler(
- VkDevice device,
- VkSampler* pSampler,
- VkResult result)
-{
+bool PostCreateSampler(VkDevice device, VkSampler *pSampler, VkResult result) {
- if(pSampler != nullptr)
- {
+ if (pSampler != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateSampler parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason = "vkCreateSampler parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(
- VkDevice device,
- const VkSamplerCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSampler* pSampler)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSampler *pSampler) {
PreCreateSampler(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
PostCreateSampler(device, pSampler, result);
@@ -4349,49 +4039,51 @@
}
bool PreCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateDescriptorSetLayout parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->pBindings != nullptr)
- {
- if(pCreateInfo->pBindings->descriptorType < VK_DESCRIPTOR_TYPE_BEGIN_RANGE ||
- pCreateInfo->pBindings->descriptorType > VK_DESCRIPTOR_TYPE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateDescriptorSetLayout parameter, VkDescriptorType pCreateInfo->pBindings->descriptorType, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pBindings->pImmutableSamplers != nullptr)
- {
- }
- }
+ VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateDescriptorSetLayout parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->pBindings != nullptr) {
+ if (pCreateInfo->pBindings->descriptorType <
+ VK_DESCRIPTOR_TYPE_BEGIN_RANGE ||
+ pCreateInfo->pBindings->descriptorType >
+ VK_DESCRIPTOR_TYPE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateDescriptorSetLayout parameter, "
+ "VkDescriptorType "
+ "pCreateInfo->pBindings->descriptorType, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pBindings->pImmutableSamplers != nullptr) {
+ }
+ }
}
return true;
}
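
Note that the pBindings check above only inspects element zero of the array. Walking every binding would look roughly like this; bindingCount and pBindings are the VkDescriptorSetLayoutCreateInfo members, and the loop is only a sketch, not code from this commit:

    // Sketch: validate every binding's descriptorType, not just the first.
    for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
        const VkDescriptorSetLayoutBinding &binding = pCreateInfo->pBindings[i];
        if (binding.descriptorType < VK_DESCRIPTOR_TYPE_BEGIN_RANGE ||
            binding.descriptorType > VK_DESCRIPTOR_TYPE_END_RANGE) {
            return false; // reported via log_msg(..., "PARAMCHECK", ...) as above
        }
    }
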
-bool PostCreateDescriptorSetLayout(
- VkDevice device,
- VkDescriptorSetLayout* pSetLayout,
- VkResult result)
-{
+bool PostCreateDescriptorSetLayout(VkDevice device,
+ VkDescriptorSetLayout *pSetLayout,
+ VkResult result) {
- if(pSetLayout != nullptr)
- {
+ if (pSetLayout != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateDescriptorSetLayout parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateDescriptorSetLayout parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
@@ -4399,1562 +4091,1471 @@
}
VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(
- VkDevice device,
- const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorSetLayout* pSetLayout)
-{
+ VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorSetLayout *pSetLayout) {
PreCreateDescriptorSetLayout(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->CreateDescriptorSetLayout(device, pCreateInfo,
+ pAllocator, pSetLayout);
PostCreateDescriptorSetLayout(device, pSetLayout, result);
return result;
}
-bool PreCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateDescriptorPool parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->pPoolSizes != nullptr)
- {
- if(pCreateInfo->pPoolSizes->type < VK_DESCRIPTOR_TYPE_BEGIN_RANGE ||
- pCreateInfo->pPoolSizes->type > VK_DESCRIPTOR_TYPE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateDescriptorPool parameter, VkDescriptorType pCreateInfo->pTypeCount->type, is an unrecognized enumerator");
- return false;
- }
- }
+bool PreCreateDescriptorPool(VkDevice device,
+ const VkDescriptorPoolCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateDescriptorPool parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->pPoolSizes != nullptr) {
+ if (pCreateInfo->pPoolSizes->type <
+ VK_DESCRIPTOR_TYPE_BEGIN_RANGE ||
+ pCreateInfo->pPoolSizes->type > VK_DESCRIPTOR_TYPE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateDescriptorPool parameter, "
+ "VkDescriptorType "
+                                          "pCreateInfo->pPoolSizes->type, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ }
}
return true;
}
-bool PostCreateDescriptorPool(
- VkDevice device,
- uint32_t maxSets,
- VkDescriptorPool* pDescriptorPool,
- VkResult result)
-{
+bool PostCreateDescriptorPool(VkDevice device, uint32_t maxSets,
+ VkDescriptorPool *pDescriptorPool,
+ VkResult result) {
- /* TODOVV: How do we validate maxSets? Probably belongs in the limits layer? */
+ /* TODOVV: How do we validate maxSets? Probably belongs in the limits layer?
+ */
- if(pDescriptorPool != nullptr)
- {
+ if (pDescriptorPool != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateDescriptorPool parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateDescriptorPool parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(
- VkDevice device,
- const VkDescriptorPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDescriptorPool* pDescriptorPool)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDescriptorPool(VkDevice device,
+ const VkDescriptorPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorPool *pDescriptorPool) {
PreCreateDescriptorPool(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->CreateDescriptorPool(device, pCreateInfo,
+ pAllocator, pDescriptorPool);
- PostCreateDescriptorPool(device, pCreateInfo->maxSets, pDescriptorPool, result);
+ PostCreateDescriptorPool(device, pCreateInfo->maxSets, pDescriptorPool,
+ result);
return result;
}
-bool PostResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkResult result)
-{
+bool PostResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+ VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkResetDescriptorPool parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkResetDescriptorPool parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
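
Every Post* hook in this file ends with the same VkResult test: any value below VK_SUCCESS is an error code and is reported through the debug-report machinery with EnumeratorString. The shared tail could read as follows; validate_result is a hypothetical name, while log_msg, mdd and EnumeratorString are the layer's own helpers seen throughout this diff:

    // Sketch of the common "result < VK_SUCCESS" tail of the Post* hooks.
    static bool validate_result(VkDevice device, const char *api,
                                VkResult result) {
        if (result < VK_SUCCESS) {
            std::string reason = std::string(api) +
                                 " parameter, VkResult result, is " +
                                 EnumeratorString(result);
            log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
                    (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
                    "%s", reason.c_str());
            return false;
        }
        return true;
    }
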
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- VkDescriptorPoolResetFlags flags)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->ResetDescriptorPool(device, descriptorPool, flags);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
+ VkDescriptorPoolResetFlags flags) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->ResetDescriptorPool(device, descriptorPool, flags);
PostResetDescriptorPool(device, descriptorPool, result);
return result;
}
-bool PreAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetLayout* pSetLayouts)
-{
- if(pSetLayouts != nullptr)
- {
+bool PreAllocateDescriptorSets(VkDevice device,
+ const VkDescriptorSetLayout *pSetLayouts) {
+ if (pSetLayouts != nullptr) {
}
return true;
}
-bool PostAllocateDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t count,
- VkDescriptorSet* pDescriptorSets,
- VkResult result)
-{
+bool PostAllocateDescriptorSets(VkDevice device,
+ VkDescriptorPool descriptorPool, uint32_t count,
+ VkDescriptorSet *pDescriptorSets,
+ VkResult result) {
-
- if(pDescriptorSets != nullptr)
- {
+ if (pDescriptorSets != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkAllocateDescriptorSets parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkAllocateDescriptorSets parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(
- VkDevice device,
- const VkDescriptorSetAllocateInfo* pAllocateInfo,
- VkDescriptorSet* pDescriptorSets)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateDescriptorSets(VkDevice device,
+ const VkDescriptorSetAllocateInfo *pAllocateInfo,
+ VkDescriptorSet *pDescriptorSets) {
PreAllocateDescriptorSets(device, pAllocateInfo->pSetLayouts);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
- PostAllocateDescriptorSets(device, pAllocateInfo->descriptorPool, pAllocateInfo->descriptorSetCount, pDescriptorSets, result);
+ PostAllocateDescriptorSets(device, pAllocateInfo->descriptorPool,
+ pAllocateInfo->descriptorSetCount,
+ pDescriptorSets, result);
return result;
}
-bool PreFreeDescriptorSets(
- VkDevice device,
- const VkDescriptorSet* pDescriptorSets)
-{
- if(pDescriptorSets != nullptr)
- {
+bool PreFreeDescriptorSets(VkDevice device,
+ const VkDescriptorSet *pDescriptorSets) {
+ if (pDescriptorSets != nullptr) {
}
return true;
}
-bool PostFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t count,
- VkResult result)
-{
+bool PostFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
+ uint32_t count, VkResult result) {
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkFreeDescriptorSets parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkFreeDescriptorSets parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(
- VkDevice device,
- VkDescriptorPool descriptorPool,
- uint32_t count,
- const VkDescriptorSet* pDescriptorSets)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
+ uint32_t count,
+ const VkDescriptorSet *pDescriptorSets) {
PreFreeDescriptorSets(device, pDescriptorSets);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->FreeDescriptorSets(device, descriptorPool, count, pDescriptorSets);
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->FreeDescriptorSets(device, descriptorPool, count,
+ pDescriptorSets);
PostFreeDescriptorSets(device, descriptorPool, count, result);
return result;
}
-bool PreUpdateDescriptorSets(
- VkDevice device,
- const VkWriteDescriptorSet* pDescriptorWrites,
- const VkCopyDescriptorSet* pDescriptorCopies)
-{
- if(pDescriptorWrites != nullptr)
- {
- if(pDescriptorWrites->sType != VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkUpdateDescriptorSets parameter, VkStructureType pDescriptorWrites->sType, is an invalid enumerator");
- return false;
- }
- if(pDescriptorWrites->descriptorType < VK_DESCRIPTOR_TYPE_BEGIN_RANGE ||
- pDescriptorWrites->descriptorType > VK_DESCRIPTOR_TYPE_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkUpdateDescriptorSets parameter, VkDescriptorType pDescriptorWrites->descriptorType, is an unrecognized enumerator");
- return false;
- }
- /* TODO: Validate other parts of pImageInfo, pBufferInfo, pTexelBufferView? */
- /* TODO: This test should probably only be done if descriptorType is correct type of descriptor */
- if(pDescriptorWrites->pImageInfo != nullptr)
- {
- if (((pDescriptorWrites->pImageInfo->imageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (pDescriptorWrites->pImageInfo->imageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (pDescriptorWrites->pImageInfo->imageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkUpdateDescriptorSets parameter, VkImageLayout pDescriptorWrites->pDescriptors->imageLayout, is an unrecognized enumerator");
- return false;
- }
- }
+bool PreUpdateDescriptorSets(VkDevice device,
+ const VkWriteDescriptorSet *pDescriptorWrites,
+ const VkCopyDescriptorSet *pDescriptorCopies) {
+ if (pDescriptorWrites != nullptr) {
+ if (pDescriptorWrites->sType !=
+ VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkUpdateDescriptorSets parameter, VkStructureType "
+ "pDescriptorWrites->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pDescriptorWrites->descriptorType <
+ VK_DESCRIPTOR_TYPE_BEGIN_RANGE ||
+ pDescriptorWrites->descriptorType > VK_DESCRIPTOR_TYPE_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkUpdateDescriptorSets parameter, VkDescriptorType "
+ "pDescriptorWrites->descriptorType, is an unrecognized "
+ "enumerator");
+ return false;
+ }
+ /* TODO: Validate other parts of pImageInfo, pBufferInfo,
+ * pTexelBufferView? */
+ /* TODO: This test should probably only be done if descriptorType is
+ * correct type of descriptor */
+ if (pDescriptorWrites->pImageInfo != nullptr) {
+ if (((pDescriptorWrites->pImageInfo->imageLayout <
+ VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
+ (pDescriptorWrites->pImageInfo->imageLayout >
+ VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (pDescriptorWrites->pImageInfo->imageLayout !=
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkUpdateDescriptorSets parameter, VkImageLayout "
+                        "pDescriptorWrites->pImageInfo->imageLayout, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ }
}
- if(pDescriptorCopies != nullptr)
- {
- if(pDescriptorCopies->sType != VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkUpdateDescriptorSets parameter, VkStructureType pDescriptorCopies->sType, is an invalid enumerator");
- return false;
- }
+ if (pDescriptorCopies != nullptr) {
+ if (pDescriptorCopies->sType != VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkUpdateDescriptorSets parameter, VkStructureType "
+ "pDescriptorCopies->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
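
The image-layout test above (and again in PreCreateRenderPass below) accepts VK_IMAGE_LAYOUT_PRESENT_SRC_KHR even though it lies outside the core BEGIN/END range, because that layout comes from the WSI swapchain extension. Written positively, the predicate is simply the following; is_valid_image_layout is an illustrative name, not a layer function:

    // Sketch: a layout passes if it is in the core range or is the swapchain
    // extension's PRESENT_SRC layout.
    static bool is_valid_image_layout(VkImageLayout layout) {
        return (layout >= VK_IMAGE_LAYOUT_BEGIN_RANGE &&
                layout <= VK_IMAGE_LAYOUT_END_RANGE) ||
               layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    }
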
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(
- VkDevice device,
- uint32_t descriptorWriteCount,
- const VkWriteDescriptorSet* pDescriptorWrites,
- uint32_t descriptorCopyCount,
- const VkCopyDescriptorSet* pDescriptorCopies)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet *pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet *pDescriptorCopies) {
PreUpdateDescriptorSets(device, pDescriptorWrites, pDescriptorCopies);
- get_dispatch_table(pc_device_table_map, device)->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+ get_dispatch_table(pc_device_table_map, device)
+ ->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies);
}
-bool PreCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateFramebuffer parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->pAttachments != nullptr)
- {
- }
+bool PreCreateFramebuffer(VkDevice device,
+ const VkFramebufferCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateFramebuffer parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->pAttachments != nullptr) {
+ }
}
return true;
}
-bool PostCreateFramebuffer(
- VkDevice device,
- VkFramebuffer* pFramebuffer,
- VkResult result)
-{
+bool PostCreateFramebuffer(VkDevice device, VkFramebuffer *pFramebuffer,
+ VkResult result) {
- if(pFramebuffer != nullptr)
- {
+ if (pFramebuffer != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateFramebuffer parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateFramebuffer parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(
- VkDevice device,
- const VkFramebufferCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkFramebuffer* pFramebuffer)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateFramebuffer(VkDevice device,
+ const VkFramebufferCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkFramebuffer *pFramebuffer) {
PreCreateFramebuffer(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
PostCreateFramebuffer(device, pFramebuffer, result);
return result;
}
-bool PreCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->pAttachments != nullptr)
- {
- if(pCreateInfo->pAttachments->format < VK_FORMAT_BEGIN_RANGE ||
- pCreateInfo->pAttachments->format > VK_FORMAT_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkFormat pCreateInfo->pAttachments->format, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pAttachments->loadOp < VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE ||
- pCreateInfo->pAttachments->loadOp > VK_ATTACHMENT_LOAD_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkAttachmentLoadOp pCreateInfo->pAttachments->loadOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pAttachments->storeOp < VK_ATTACHMENT_STORE_OP_BEGIN_RANGE ||
- pCreateInfo->pAttachments->storeOp > VK_ATTACHMENT_STORE_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkAttachmentStoreOp pCreateInfo->pAttachments->storeOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pAttachments->stencilLoadOp < VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE ||
- pCreateInfo->pAttachments->stencilLoadOp > VK_ATTACHMENT_LOAD_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkAttachmentLoadOp pCreateInfo->pAttachments->stencilLoadOp, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pAttachments->stencilStoreOp < VK_ATTACHMENT_STORE_OP_BEGIN_RANGE ||
- pCreateInfo->pAttachments->stencilStoreOp > VK_ATTACHMENT_STORE_OP_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkAttachmentStoreOp pCreateInfo->pAttachments->stencilStoreOp, is an unrecognized enumerator");
- return false;
- }
- if (((pCreateInfo->pAttachments->initialLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (pCreateInfo->pAttachments->initialLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (pCreateInfo->pAttachments->initialLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkImageLayout pCreateInfo->pAttachments->initialLayout, is an unrecognized enumerator");
- return false;
- }
- if (((pCreateInfo->pAttachments->initialLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (pCreateInfo->pAttachments->initialLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (pCreateInfo->pAttachments->initialLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkImageLayout pCreateInfo->pAttachments->finalLayout, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfo->pSubpasses != nullptr)
- {
- if(pCreateInfo->pSubpasses->pipelineBindPoint < VK_PIPELINE_BIND_POINT_BEGIN_RANGE ||
- pCreateInfo->pSubpasses->pipelineBindPoint > VK_PIPELINE_BIND_POINT_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkPipelineBindPoint pCreateInfo->pSubpasses->pipelineBindPoint, is an unrecognized enumerator");
- return false;
- }
- if(pCreateInfo->pSubpasses->pInputAttachments != nullptr)
- {
- if (((pCreateInfo->pSubpasses->pInputAttachments->layout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (pCreateInfo->pSubpasses->pInputAttachments->layout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (pCreateInfo->pSubpasses->pInputAttachments->layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkImageLayout pCreateInfo->pSubpasses->pInputAttachments->layout, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfo->pSubpasses->pColorAttachments != nullptr)
- {
- if (((pCreateInfo->pSubpasses->pColorAttachments->layout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (pCreateInfo->pSubpasses->pColorAttachments->layout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (pCreateInfo->pSubpasses->pColorAttachments->layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkImageLayout pCreateInfo->pSubpasses->pColorAttachments->layout, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfo->pSubpasses->pResolveAttachments != nullptr)
- {
- if (((pCreateInfo->pSubpasses->pResolveAttachments->layout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (pCreateInfo->pSubpasses->pResolveAttachments->layout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (pCreateInfo->pSubpasses->pResolveAttachments->layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkImageLayout pCreateInfo->pSubpasses->pResolveAttachments->layout, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfo->pSubpasses->pDepthStencilAttachment &&
- ((pCreateInfo->pSubpasses->pDepthStencilAttachment->layout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (pCreateInfo->pSubpasses->pDepthStencilAttachment->layout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (pCreateInfo->pSubpasses->pDepthStencilAttachment->layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateRenderPass parameter, VkImageLayout pCreateInfo->pSubpasses->pDepthStencilAttachment->layout, is an unrecognized enumerator");
- return false;
- }
- }
- if(pCreateInfo->pDependencies != nullptr)
- {
- }
+bool PreCreateRenderPass(VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->pAttachments != nullptr) {
+ if (pCreateInfo->pAttachments->format < VK_FORMAT_BEGIN_RANGE ||
+ pCreateInfo->pAttachments->format > VK_FORMAT_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateRenderPass parameter, VkFormat "
+ "pCreateInfo->pAttachments->format, is "
+ "an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pAttachments->loadOp <
+ VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE ||
+ pCreateInfo->pAttachments->loadOp >
+ VK_ATTACHMENT_LOAD_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateRenderPass parameter, "
+ "VkAttachmentLoadOp "
+ "pCreateInfo->pAttachments->loadOp, is "
+ "an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pAttachments->storeOp <
+ VK_ATTACHMENT_STORE_OP_BEGIN_RANGE ||
+ pCreateInfo->pAttachments->storeOp >
+ VK_ATTACHMENT_STORE_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateRenderPass parameter, "
+ "VkAttachmentStoreOp "
+ "pCreateInfo->pAttachments->storeOp, is "
+ "an unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pAttachments->stencilLoadOp <
+ VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE ||
+ pCreateInfo->pAttachments->stencilLoadOp >
+ VK_ATTACHMENT_LOAD_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkAttachmentLoadOp "
+ "pCreateInfo->pAttachments->stencilLoadOp, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pAttachments->stencilStoreOp <
+ VK_ATTACHMENT_STORE_OP_BEGIN_RANGE ||
+ pCreateInfo->pAttachments->stencilStoreOp >
+ VK_ATTACHMENT_STORE_OP_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkAttachmentStoreOp "
+ "pCreateInfo->pAttachments->stencilStoreOp, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ if (((pCreateInfo->pAttachments->initialLayout <
+ VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
+ (pCreateInfo->pAttachments->initialLayout >
+ VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (pCreateInfo->pAttachments->initialLayout !=
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkImageLayout "
+ "pCreateInfo->pAttachments->initialLayout, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+            if (((pCreateInfo->pAttachments->finalLayout <
+                  VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
+                 (pCreateInfo->pAttachments->finalLayout >
+                  VK_IMAGE_LAYOUT_END_RANGE)) &&
+                (pCreateInfo->pAttachments->finalLayout !=
+                 VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK", "vkCreateRenderPass parameter, "
+ "VkImageLayout "
+ "pCreateInfo->pAttachments->finalLayout, "
+ "is an unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfo->pSubpasses != nullptr) {
+ if (pCreateInfo->pSubpasses->pipelineBindPoint <
+ VK_PIPELINE_BIND_POINT_BEGIN_RANGE ||
+ pCreateInfo->pSubpasses->pipelineBindPoint >
+ VK_PIPELINE_BIND_POINT_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkPipelineBindPoint "
+ "pCreateInfo->pSubpasses->pipelineBindPoint, is an "
+ "unrecognized enumerator");
+ return false;
+ }
+ if (pCreateInfo->pSubpasses->pInputAttachments != nullptr) {
+ if (((pCreateInfo->pSubpasses->pInputAttachments->layout <
+ VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
+ (pCreateInfo->pSubpasses->pInputAttachments->layout >
+ VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (pCreateInfo->pSubpasses->pInputAttachments->layout !=
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkImageLayout "
+ "pCreateInfo->pSubpasses->pInputAttachments->"
+ "layout, is an unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfo->pSubpasses->pColorAttachments != nullptr) {
+ if (((pCreateInfo->pSubpasses->pColorAttachments->layout <
+ VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
+ (pCreateInfo->pSubpasses->pColorAttachments->layout >
+ VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (pCreateInfo->pSubpasses->pColorAttachments->layout !=
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkImageLayout "
+ "pCreateInfo->pSubpasses->pColorAttachments->"
+ "layout, is an unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfo->pSubpasses->pResolveAttachments != nullptr) {
+ if (((pCreateInfo->pSubpasses->pResolveAttachments->layout <
+ VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
+ (pCreateInfo->pSubpasses->pResolveAttachments->layout >
+ VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (pCreateInfo->pSubpasses->pResolveAttachments->layout !=
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkImageLayout "
+ "pCreateInfo->pSubpasses->pResolveAttachments->"
+ "layout, is an unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfo->pSubpasses->pDepthStencilAttachment &&
+ ((pCreateInfo->pSubpasses->pDepthStencilAttachment->layout <
+ VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
+ (pCreateInfo->pSubpasses->pDepthStencilAttachment->layout >
+ VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (pCreateInfo->pSubpasses->pDepthStencilAttachment->layout !=
+ VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
+ "PARAMCHECK",
+ "vkCreateRenderPass parameter, VkImageLayout "
+ "pCreateInfo->pSubpasses->pDepthStencilAttachment->"
+ "layout, is an unrecognized enumerator");
+ return false;
+ }
+ }
+ if (pCreateInfo->pDependencies != nullptr) {
+ }
}
return true;
}
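
As with the descriptor-set-layout bindings earlier, PreCreateRenderPass examines only element zero of pAttachments and pSubpasses. Extending the attachment checks to the whole array could reuse the layout predicate sketched above; attachmentCount and pAttachments are the VkRenderPassCreateInfo members, and the loop is only an illustration:

    // Sketch: apply the layout checks to every attachment description.
    for (uint32_t i = 0; i < pCreateInfo->attachmentCount; ++i) {
        const VkAttachmentDescription &att = pCreateInfo->pAttachments[i];
        if (!is_valid_image_layout(att.initialLayout) ||
            !is_valid_image_layout(att.finalLayout)) {
            return false; // reported via log_msg(..., "PARAMCHECK", ...) as above
        }
    }
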
-bool PostCreateRenderPass(
- VkDevice device,
- VkRenderPass* pRenderPass,
- VkResult result)
-{
+bool PostCreateRenderPass(VkDevice device, VkRenderPass *pRenderPass,
+ VkResult result) {
- if(pRenderPass != nullptr)
- {
+ if (pRenderPass != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateRenderPass parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateRenderPass parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(
- VkDevice device,
- const VkRenderPassCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkRenderPass* pRenderPass)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateRenderPass(VkDevice device,
+ const VkRenderPassCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkRenderPass *pRenderPass) {
PreCreateRenderPass(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
PostCreateRenderPass(device, pRenderPass, result);
return result;
}
-bool PostGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity)
-{
+bool PostGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass,
+ VkExtent2D *pGranularity) {
-
- if(pGranularity != nullptr)
- {
+ if (pGranularity != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(
- VkDevice device,
- VkRenderPass renderPass,
- VkExtent2D* pGranularity)
-{
- get_dispatch_table(pc_device_table_map, device)->GetRenderAreaGranularity(device, renderPass, pGranularity);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass,
+ VkExtent2D *pGranularity) {
+ get_dispatch_table(pc_device_table_map, device)
+ ->GetRenderAreaGranularity(device, renderPass, pGranularity);
PostGetRenderAreaGranularity(device, renderPass, pGranularity);
}
-bool PreCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCreateCommandPool parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
+bool PreCreateCommandPool(VkDevice device,
+ const VkCommandPoolCreateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType != VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCreateCommandPool parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateCommandPool(
- VkDevice device,
- VkCommandPool* pCommandPool,
- VkResult result)
-{
+bool PostCreateCommandPool(VkDevice device, VkCommandPool *pCommandPool,
+ VkResult result) {
- if(pCommandPool != nullptr)
- {
+ if (pCommandPool != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkCreateCommandPool parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkCreateCommandPool parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(
- VkDevice device,
- const VkCommandPoolCreateInfo* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkCommandPool* pCommandPool)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateCommandPool(VkDevice device,
+ const VkCommandPoolCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkCommandPool *pCommandPool) {
PreCreateCommandPool(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->CreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
PostCreateCommandPool(device, pCommandPool, result);
return result;
}
-bool PostResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags,
- VkResult result)
-{
+bool PostResetCommandPool(VkDevice device, VkCommandPool commandPool,
+ VkCommandPoolResetFlags flags, VkResult result) {
-
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkResetCommandPool parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkResetCommandPool parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
- VkDevice device,
- VkCommandPool commandPool,
- VkCommandPoolResetFlags flags)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, device)->ResetCommandPool(device, commandPool, flags);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetCommandPool(VkDevice device, VkCommandPool commandPool,
+ VkCommandPoolResetFlags flags) {
+ VkResult result = get_dispatch_table(pc_device_table_map, device)
+ ->ResetCommandPool(device, commandPool, flags);
PostResetCommandPool(device, commandPool, flags, result);
return result;
}
-bool PreCreateCommandBuffer(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pCreateInfo)
-{
- if(pCreateInfo != nullptr)
- {
- if(pCreateInfo->sType != VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkAllocateCommandBuffers parameter, VkStructureType pCreateInfo->sType, is an invalid enumerator");
- return false;
- }
- if(pCreateInfo->level < VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE ||
- pCreateInfo->level > VK_COMMAND_BUFFER_LEVEL_END_RANGE)
- {
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkAllocateCommandBuffers parameter, VkCommandBufferLevel pCreateInfo->level, is an unrecognized enumerator");
- return false;
- }
+bool PreCreateCommandBuffer(VkDevice device,
+ const VkCommandBufferAllocateInfo *pCreateInfo) {
+ if (pCreateInfo != nullptr) {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkAllocateCommandBuffers parameter, VkStructureType "
+ "pCreateInfo->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pCreateInfo->level < VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE ||
+ pCreateInfo->level > VK_COMMAND_BUFFER_LEVEL_END_RANGE) {
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkAllocateCommandBuffers parameter, VkCommandBufferLevel "
+ "pCreateInfo->level, is an unrecognized enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCreateCommandBuffer(
- VkDevice device,
- VkCommandBuffer* pCommandBuffer,
- VkResult result)
-{
+bool PostCreateCommandBuffer(VkDevice device, VkCommandBuffer *pCommandBuffer,
+ VkResult result) {
- if(pCommandBuffer != nullptr)
- {
+ if (pCommandBuffer != nullptr) {
}
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkAllocateCommandBuffers parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkAllocateCommandBuffers parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(device), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(
- VkDevice device,
- const VkCommandBufferAllocateInfo* pCreateInfo,
- VkCommandBuffer* pCommandBuffer)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAllocateCommandBuffers(VkDevice device,
+ const VkCommandBufferAllocateInfo *pCreateInfo,
+ VkCommandBuffer *pCommandBuffer) {
PreCreateCommandBuffer(device, pCreateInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, device)->AllocateCommandBuffers(device, pCreateInfo, pCommandBuffer);
+ VkResult result =
+ get_dispatch_table(pc_device_table_map, device)
+ ->AllocateCommandBuffers(device, pCreateInfo, pCommandBuffer);
PostCreateCommandBuffer(device, pCommandBuffer, result);
return result;
}
-bool PreBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo)
-{
- if(pBeginInfo != nullptr)
- {
- if(pBeginInfo->sType != VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkBeginCommandBuffer parameter, VkStructureType pBeginInfo->sType, is an invalid enumerator");
- return false;
- }
+bool PreBeginCommandBuffer(VkCommandBuffer commandBuffer,
+ const VkCommandBufferBeginInfo *pBeginInfo) {
+ if (pBeginInfo != nullptr) {
+ if (pBeginInfo->sType != VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkBeginCommandBuffer parameter, VkStructureType "
+ "pBeginInfo->sType, is an invalid enumerator");
+ return false;
+ }
}
return true;
}
-bool PostBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkResult result)
-{
+bool PostBeginCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkBeginCommandBuffer parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkBeginCommandBuffer parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(
- VkCommandBuffer commandBuffer,
- const VkCommandBufferBeginInfo* pBeginInfo)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
+ const VkCommandBufferBeginInfo *pBeginInfo) {
PreBeginCommandBuffer(commandBuffer, pBeginInfo);
- VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)->BeginCommandBuffer(commandBuffer, pBeginInfo);
+ VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->BeginCommandBuffer(commandBuffer, pBeginInfo);
PostBeginCommandBuffer(commandBuffer, result);
return result;
}
-bool PostEndCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkResult result)
-{
+bool PostEndCommandBuffer(VkCommandBuffer commandBuffer, VkResult result) {
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkEndCommandBuffer parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkEndCommandBuffer parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(
- VkCommandBuffer commandBuffer)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)->EndCommandBuffer(commandBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
+ VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->EndCommandBuffer(commandBuffer);
PostEndCommandBuffer(commandBuffer, result);
return result;
}
-bool PostResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags,
- VkResult result)
-{
+bool PostResetCommandBuffer(VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags, VkResult result) {
-
- if(result < VK_SUCCESS)
- {
- std::string reason = "vkResetCommandBuffer parameter, VkResult result, is " + EnumeratorString(result);
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK", "%s", reason.c_str());
+ if (result < VK_SUCCESS) {
+ std::string reason =
+ "vkResetCommandBuffer parameter, VkResult result, is " +
+ EnumeratorString(result);
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "%s", reason.c_str());
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(
- VkCommandBuffer commandBuffer,
- VkCommandBufferResetFlags flags)
-{
- VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)->ResetCommandBuffer(commandBuffer, flags);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkResetCommandBuffer(VkCommandBuffer commandBuffer,
+ VkCommandBufferResetFlags flags) {
+ VkResult result = get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->ResetCommandBuffer(commandBuffer, flags);
PostResetCommandBuffer(commandBuffer, flags, result);
return result;
}
-bool PostCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline)
-{
+bool PostCmdBindPipeline(VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline) {
- if(pipelineBindPoint < VK_PIPELINE_BIND_POINT_BEGIN_RANGE ||
- pipelineBindPoint > VK_PIPELINE_BIND_POINT_END_RANGE)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBindPipeline parameter, VkPipelineBindPoint pipelineBindPoint, is an unrecognized enumerator");
+ if (pipelineBindPoint < VK_PIPELINE_BIND_POINT_BEGIN_RANGE ||
+ pipelineBindPoint > VK_PIPELINE_BIND_POINT_END_RANGE) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBindPipeline parameter, VkPipelineBindPoint "
+ "pipelineBindPoint, is an unrecognized enumerator");
return false;
}
-
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipeline pipeline)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindPipeline(VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipeline pipeline) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
PostCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
+ uint32_t viewportCount, const VkViewport *pViewports) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetViewport(
+ commandBuffer, firstViewport, viewportCount, pViewports);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
+ uint32_t scissorCount, const VkRect2D *pScissors) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetLineWidth(commandBuffer, lineWidth);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetLineWidth(commandBuffer, lineWidth);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetDepthBias(VkCommandBuffer commandBuffer,
+ float depthBiasConstantFactor, float depthBiasClamp,
+ float depthBiasSlopeFactor) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor,
+ depthBiasClamp, depthBiasSlopeFactor);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetBlendConstants(commandBuffer, blendConstants);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetBlendConstants(VkCommandBuffer commandBuffer,
+ const float blendConstants[4]) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetBlendConstants(commandBuffer, blendConstants);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
+ float maxDepthBounds) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask,
+ uint32_t compareMask) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask, uint32_t writeMask) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetStencilReference(commandBuffer, faceMask, reference);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetStencilReference(VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask, uint32_t reference) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetStencilReference(commandBuffer, faceMask, reference);
}
-bool PreCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- const VkDescriptorSet* pDescriptorSets,
- const uint32_t* pDynamicOffsets)
-{
- if(pDescriptorSets != nullptr)
- {
+bool PreCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
+ const VkDescriptorSet *pDescriptorSets,
+ const uint32_t *pDynamicOffsets) {
+ if (pDescriptorSets != nullptr) {
}
- if(pDynamicOffsets != nullptr)
- {
+ if (pDynamicOffsets != nullptr) {
}
return true;
}
-bool PostCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t setCount,
- uint32_t dynamicOffsetCount)
-{
+bool PostCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout, uint32_t firstSet,
+ uint32_t setCount, uint32_t dynamicOffsetCount) {
- if(pipelineBindPoint < VK_PIPELINE_BIND_POINT_BEGIN_RANGE ||
- pipelineBindPoint > VK_PIPELINE_BIND_POINT_END_RANGE)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBindDescriptorSets parameter, VkPipelineBindPoint pipelineBindPoint, is an unrecognized enumerator");
+ if (pipelineBindPoint < VK_PIPELINE_BIND_POINT_BEGIN_RANGE ||
+ pipelineBindPoint > VK_PIPELINE_BIND_POINT_END_RANGE) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBindDescriptorSets parameter, VkPipelineBindPoint "
+ "pipelineBindPoint, is an unrecognized enumerator");
return false;
}
-
-
-
-
return true;
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
- VkCommandBuffer commandBuffer,
- VkPipelineBindPoint pipelineBindPoint,
- VkPipelineLayout layout,
- uint32_t firstSet,
- uint32_t setCount,
- const VkDescriptorSet* pDescriptorSets,
- uint32_t dynamicOffsetCount,
- const uint32_t* pDynamicOffsets)
-{
+ VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount,
+ const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
+ const uint32_t *pDynamicOffsets) {
PreCmdBindDescriptorSets(commandBuffer, pDescriptorSets, pDynamicOffsets);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout,
+ firstSet, setCount, pDescriptorSets,
+ dynamicOffsetCount, pDynamicOffsets);
- PostCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, setCount, dynamicOffsetCount);
+ PostCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout,
+ firstSet, setCount, dynamicOffsetCount);
}
-bool PostCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType)
-{
+bool PostCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, VkIndexType indexType) {
-
-
- if(indexType < VK_INDEX_TYPE_BEGIN_RANGE ||
- indexType > VK_INDEX_TYPE_END_RANGE)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBindIndexBuffer parameter, VkIndexType indexType, is an unrecognized enumerator");
+ if (indexType < VK_INDEX_TYPE_BEGIN_RANGE ||
+ indexType > VK_INDEX_TYPE_END_RANGE) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBindIndexBuffer parameter, VkIndexType indexType, is an "
+ "unrecognized enumerator");
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- VkIndexType indexType)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, VkIndexType indexType) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
PostCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
}
-bool PreCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets)
-{
- if(pBuffers != nullptr)
- {
+bool PreCmdBindVertexBuffers(VkCommandBuffer commandBuffer,
+ const VkBuffer *pBuffers,
+ const VkDeviceSize *pOffsets) {
+ if (pBuffers != nullptr) {
}
- if(pOffsets != nullptr)
- {
+ if (pOffsets != nullptr) {
}
return true;
}
-bool PostCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount)
-{
-
-
+bool PostCmdBindVertexBuffers(VkCommandBuffer commandBuffer,
+ uint32_t firstBinding, uint32_t bindingCount) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(
- VkCommandBuffer commandBuffer,
- uint32_t firstBinding,
- uint32_t bindingCount,
- const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
+ uint32_t bindingCount, const VkBuffer *pBuffers,
+ const VkDeviceSize *pOffsets) {
PreCmdBindVertexBuffers(commandBuffer, pBuffers, pOffsets);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount,
+ pBuffers, pOffsets);
PostCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount);
}
-bool PreCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance)
-{
+bool PreCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount,
+ uint32_t instanceCount, uint32_t firstVertex,
+ uint32_t firstInstance) {
if (vertexCount == 0) {
- // TODO: Verify against Valid Usage section. I don't see a non-zero vertexCount listed, may need to add that and make
+ // TODO: Verify against Valid Usage section. I don't see a non-zero
+ // vertexCount listed, may need to add that and make
// this an error or leave as is.
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdDraw parameter, uint32_t vertexCount, is 0");
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdDraw parameter, uint32_t vertexCount, is 0");
return false;
}
if (instanceCount == 0) {
- // TODO: Verify against Valid Usage section. I don't see a non-zero instanceCount listed, may need to add that and make
+ // TODO: Verify against Valid Usage section. I don't see a non-zero
+ // instanceCount listed, may need to add that and make
// this an error or leave as is.
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARN_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdDraw parameter, uint32_t instanceCount, is 0");
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_WARN_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdDraw parameter, uint32_t instanceCount, is 0");
return false;
}
return true;
}
-bool PostCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t firstVertex,
- uint32_t vertexCount,
- uint32_t firstInstance,
- uint32_t instanceCount)
-{
-
-
-
-
+bool PostCmdDraw(VkCommandBuffer commandBuffer, uint32_t firstVertex,
+ uint32_t vertexCount, uint32_t firstInstance,
+ uint32_t instanceCount) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDraw(
- VkCommandBuffer commandBuffer,
- uint32_t vertexCount,
- uint32_t instanceCount,
- uint32_t firstVertex,
- uint32_t firstInstance)
-{
- PreCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount,
+ uint32_t instanceCount, uint32_t firstVertex,
+ uint32_t firstInstance) {
+ PreCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex,
+ firstInstance);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDraw(
+ commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
- PostCmdDraw(commandBuffer, firstVertex, vertexCount, firstInstance, instanceCount);
+ PostCmdDraw(commandBuffer, firstVertex, vertexCount, firstInstance,
+ instanceCount);
}
-bool PostCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t firstIndex,
- uint32_t indexCount,
- int32_t vertexOffset,
- uint32_t firstInstance,
- uint32_t instanceCount)
-{
-
-
-
-
-
+bool PostCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t firstIndex,
+ uint32_t indexCount, int32_t vertexOffset,
+ uint32_t firstInstance, uint32_t instanceCount) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(
- VkCommandBuffer commandBuffer,
- uint32_t indexCount,
- uint32_t instanceCount,
- uint32_t firstIndex,
- int32_t vertexOffset,
- uint32_t firstInstance)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
+ uint32_t instanceCount, uint32_t firstIndex,
+ int32_t vertexOffset, uint32_t firstInstance) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex,
+ vertexOffset, firstInstance);
- PostCmdDrawIndexed(commandBuffer, firstIndex, indexCount, vertexOffset, firstInstance, instanceCount);
+ PostCmdDrawIndexed(commandBuffer, firstIndex, indexCount, vertexOffset,
+ firstInstance, instanceCount);
}
-bool PostCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t count,
- uint32_t stride)
-{
-
-
-
-
+bool PostCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count, uint32_t stride) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t count,
- uint32_t stride)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDrawIndirect(commandBuffer, buffer, offset, count, stride);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count, uint32_t stride) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdDrawIndirect(commandBuffer, buffer, offset, count, stride);
PostCmdDrawIndirect(commandBuffer, buffer, offset, count, stride);
}
-bool PostCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t count,
- uint32_t stride)
-{
-
-
-
-
+bool PostCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count,
+ uint32_t stride) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset,
- uint32_t count,
- uint32_t stride)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, count, stride);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset, uint32_t count,
+ uint32_t stride) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, count, stride);
PostCmdDrawIndexedIndirect(commandBuffer, buffer, offset, count, stride);
}
-bool PostCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t x,
- uint32_t y,
- uint32_t z)
-{
-
-
-
+bool PostCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y,
+ uint32_t z) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(
- VkCommandBuffer commandBuffer,
- uint32_t x,
- uint32_t y,
- uint32_t z)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDispatch(commandBuffer, x, y, z);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y,
+ uint32_t z) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdDispatch(commandBuffer, x, y, z);
PostCmdDispatch(commandBuffer, x, y, z);
}
-bool PostCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset)
-{
-
-
+bool PostCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(
- VkCommandBuffer commandBuffer,
- VkBuffer buffer,
- VkDeviceSize offset)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdDispatchIndirect(commandBuffer, buffer, offset);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
+ VkDeviceSize offset) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdDispatchIndirect(commandBuffer, buffer, offset);
PostCmdDispatchIndirect(commandBuffer, buffer, offset);
}
-bool PreCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- const VkBufferCopy* pRegions)
-{
- if(pRegions != nullptr)
- {
+bool PreCmdCopyBuffer(VkCommandBuffer commandBuffer,
+ const VkBufferCopy *pRegions) {
+ if (pRegions != nullptr) {
}
return true;
}
-bool PostCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount)
-{
-
-
-
+bool PostCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkBuffer dstBuffer, uint32_t regionCount) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkBuffer dstBuffer, uint32_t regionCount,
+ const VkBufferCopy *pRegions) {
PreCmdCopyBuffer(commandBuffer, pRegions);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdCopyBuffer(
+ commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
PostCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount);
}
-bool PreCmdCopyImage(
- VkCommandBuffer commandBuffer,
- const VkImageCopy* pRegions)
-{
- if(pRegions != nullptr)
- {
- if ((pRegions->srcSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
- if ((pRegions->dstSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
+bool PreCmdCopyImage(VkCommandBuffer commandBuffer,
+ const VkImageCopy *pRegions) {
+ if (pRegions != nullptr) {
+ if ((pRegions->srcSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyImage parameter, VkImageAspect "
+ "pRegions->srcSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
+ if ((pRegions->dstSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyImage parameter, VkImageAspect "
+ "pRegions->dstSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount)
-{
+bool PostCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount) {
if (((srcImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImage parameter, VkImageLayout srcImageLayout, is an unrecognized enumerator");
+ (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyImage parameter, VkImageLayout srcImageLayout, is an "
+ "unrecognized enumerator");
return false;
}
-
if (((dstImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImage parameter, VkImageLayout dstImageLayout, is an unrecognized enumerator");
+ (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyImage parameter, VkImageLayout dstImageLayout, is an "
+ "unrecognized enumerator");
return false;
}
-
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageCopy *pRegions) {
PreCmdCopyImage(commandBuffer, pRegions);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage,
+ dstImageLayout, regionCount, pRegions);
- PostCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount);
+ PostCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage,
+ dstImageLayout, regionCount);
}
-bool PreCmdBlitImage(
- VkCommandBuffer commandBuffer,
- const VkImageBlit* pRegions)
-{
- if(pRegions != nullptr)
- {
- if ((pRegions->srcSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
- if ((pRegions->dstSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
+bool PreCmdBlitImage(VkCommandBuffer commandBuffer,
+ const VkImageBlit *pRegions) {
+ if (pRegions != nullptr) {
+ if ((pRegions->srcSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+                    "vkCmdBlitImage parameter, VkImageAspect "
+ "pRegions->srcSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
+ if ((pRegions->dstSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+                    "vkCmdBlitImage parameter, VkImageAspect "
+ "pRegions->dstSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- VkFilter filter)
-{
-
+bool PostCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ VkFilter filter) {
if (((srcImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBlitImage parameter, VkImageLayout srcImageLayout, is an unrecognized enumerator");
+ (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBlitImage parameter, VkImageLayout srcImageLayout, is an "
+ "unrecognized enumerator");
return false;
}
-
if (((dstImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBlitImage parameter, VkImageLayout dstImageLayout, is an unrecognized enumerator");
+ (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBlitImage parameter, VkImageLayout dstImageLayout, is an "
+ "unrecognized enumerator");
return false;
}
-
- if(filter < VK_FILTER_BEGIN_RANGE ||
- filter > VK_FILTER_END_RANGE)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBlitImage parameter, VkFilter filter, is an unrecognized enumerator");
+ if (filter < VK_FILTER_BEGIN_RANGE || filter > VK_FILTER_END_RANGE) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBlitImage parameter, VkFilter filter, is an unrecognized "
+ "enumerator");
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageBlit* pRegions,
- VkFilter filter)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageBlit *pRegions, VkFilter filter) {
PreCmdBlitImage(commandBuffer, pRegions);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage,
+ dstImageLayout, regionCount, pRegions, filter);
- PostCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, filter);
+ PostCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage,
+ dstImageLayout, regionCount, filter);
}
-bool PreCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- const VkBufferImageCopy* pRegions)
-{
- if(pRegions != nullptr)
- {
- if ((pRegions->imageSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyBufferToImage parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
+bool PreCmdCopyBufferToImage(VkCommandBuffer commandBuffer,
+ const VkBufferImageCopy *pRegions) {
+ if (pRegions != nullptr) {
+ if ((pRegions->imageSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyBufferToImage parameter, VkImageAspect "
+ "pRegions->imageSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount)
-{
-
-
+bool PostCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkImage dstImage, VkImageLayout dstImageLayout,
+ uint32_t regionCount) {
if (((dstImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyBufferToImage parameter, VkImageLayout dstImageLayout, is an unrecognized enumerator");
+ (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyBufferToImage parameter, VkImageLayout "
+ "dstImageLayout, is an unrecognized enumerator");
return false;
}
-
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(
- VkCommandBuffer commandBuffer,
- VkBuffer srcBuffer,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
+ VkImage dstImage, VkImageLayout dstImageLayout,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
PreCmdCopyBufferToImage(commandBuffer, pRegions);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage,
+ dstImageLayout, regionCount, pRegions);
- PostCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount);
+ PostCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout,
+ regionCount);
}
-bool PreCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- const VkBufferImageCopy* pRegions)
-{
- if(pRegions != nullptr)
- {
- if ((pRegions->imageSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImageToBuffer parameter, VkImageAspect pRegions->imageSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
+bool PreCmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
+ const VkBufferImageCopy *pRegions) {
+ if (pRegions != nullptr) {
+ if ((pRegions->imageSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyImageToBuffer parameter, VkImageAspect "
+ "pRegions->imageSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount)
-{
-
+bool PostCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+ uint32_t regionCount) {
if (((srcImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdCopyImageToBuffer parameter, VkImageLayout srcImageLayout, is an unrecognized enumerator");
+ (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdCopyImageToBuffer parameter, VkImageLayout "
+ "srcImageLayout, is an unrecognized enumerator");
return false;
}
-
-
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkBuffer dstBuffer,
- uint32_t regionCount,
- const VkBufferImageCopy* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkBuffer dstBuffer,
+ uint32_t regionCount,
+ const VkBufferImageCopy *pRegions) {
PreCmdCopyImageToBuffer(commandBuffer, pRegions);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout,
+ dstBuffer, regionCount, pRegions);
- PostCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount);
+ PostCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer,
+ regionCount);
}
-bool PreCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- const uint32_t* pData)
-{
- if(pData != nullptr)
- {
+bool PreCmdUpdateBuffer(VkCommandBuffer commandBuffer, const uint32_t *pData) {
+ if (pData != nullptr) {
}
return true;
}
-bool PostCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize)
-{
-
-
-
+bool PostCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize dataSize) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize dataSize,
- const uint32_t* pData)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize dataSize,
+ const uint32_t *pData) {
PreCmdUpdateBuffer(commandBuffer, pData);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
PostCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize);
}
-bool PostCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data)
-{
-
-
-
-
+bool PostCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize size,
+ uint32_t data) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(
- VkCommandBuffer commandBuffer,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize size,
- uint32_t data)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
PostCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
}
-bool PreCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- const VkClearColorValue* pColor,
- const VkImageSubresourceRange* pRanges)
-{
- if(pColor != nullptr)
- {
+bool PreCmdClearColorImage(VkCommandBuffer commandBuffer,
+ const VkClearColorValue *pColor,
+ const VkImageSubresourceRange *pRanges) {
+ if (pColor != nullptr) {
}
- if(pRanges != nullptr)
- {
+ if (pRanges != nullptr) {
/* TODO: How should we validate pRanges->aspectMask */
}
return true;
}
-bool PostCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- uint32_t rangeCount)
-{
-
+bool PostCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout, uint32_t rangeCount) {
if (((imageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (imageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (imageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdClearColorImage parameter, VkImageLayout imageLayout, is an unrecognized enumerator");
+ (imageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (imageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdClearColorImage parameter, VkImageLayout imageLayout, is "
+ "an unrecognized enumerator");
return false;
}
-
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearColorValue* pColor,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearColorValue *pColor, uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
PreCmdClearColorImage(commandBuffer, pColor, pRanges);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdClearColorImage(
+ commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
PostCmdClearColorImage(commandBuffer, image, imageLayout, rangeCount);
}
-bool PreCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- const VkImageSubresourceRange* pRanges)
-{
- if(pRanges != nullptr)
- {
+bool PreCmdClearDepthStencilImage(VkCommandBuffer commandBuffer,
+ const VkImageSubresourceRange *pRanges) {
+ if (pRanges != nullptr) {
/*
          * TODO: How do we validate pRanges->aspectMask?
          * Allowed values are: VK_IMAGE_ASPECT_DEPTH_BIT and
@@ -5966,794 +5567,693 @@
}
bool PostCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount)
-{
-
+ VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
+ const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount) {
if (((imageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (imageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (imageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdClearDepthStencilImage parameter, VkImageLayout imageLayout, is an unrecognized enumerator");
+ (imageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (imageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdClearDepthStencilImage parameter, VkImageLayout "
+ "imageLayout, is an unrecognized enumerator");
return false;
}
-
-
-
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
- VkCommandBuffer commandBuffer,
- VkImage image,
- VkImageLayout imageLayout,
- const VkClearDepthStencilValue* pDepthStencil,
- uint32_t rangeCount,
- const VkImageSubresourceRange* pRanges)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
+ VkImageLayout imageLayout,
+ const VkClearDepthStencilValue *pDepthStencil,
+ uint32_t rangeCount,
+ const VkImageSubresourceRange *pRanges) {
PreCmdClearDepthStencilImage(commandBuffer, pRanges);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdClearDepthStencilImage(commandBuffer, image, imageLayout,
+ pDepthStencil, rangeCount, pRanges);
- PostCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount);
+ PostCmdClearDepthStencilImage(commandBuffer, image, imageLayout,
+ pDepthStencil, rangeCount);
}
-bool PreCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- const VkClearColorValue* pColor,
- const VkClearRect* pRects)
-{
- if(pColor != nullptr)
- {
+bool PreCmdClearAttachments(VkCommandBuffer commandBuffer,
+ const VkClearColorValue *pColor,
+ const VkClearRect *pRects) {
+ if (pColor != nullptr) {
}
- if(pRects != nullptr)
- {
+ if (pRects != nullptr) {
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(
- VkCommandBuffer commandBuffer,
- uint32_t attachmentCount,
- const VkClearAttachment* pAttachments,
- uint32_t rectCount,
- const VkClearRect* pRects)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdClearAttachments(VkCommandBuffer commandBuffer,
+ uint32_t attachmentCount,
+ const VkClearAttachment *pAttachments,
+ uint32_t rectCount, const VkClearRect *pRects) {
for (uint32_t i = 0; i < attachmentCount; i++) {
- PreCmdClearAttachments(commandBuffer, &pAttachments[i].clearValue.color, pRects);
+ PreCmdClearAttachments(commandBuffer, &pAttachments[i].clearValue.color,
+ pRects);
}
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdClearAttachments(
+ commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}
-bool PreCmdResolveImage(
- VkCommandBuffer commandBuffer,
- const VkImageResolve* pRegions)
-{
- if(pRegions != nullptr)
- {
- if ((pRegions->srcSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdResolveImage parameter, VkImageAspect pRegions->srcSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
- if ((pRegions->dstSubresource.aspectMask &
- (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) == 0)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdResolveImage parameter, VkImageAspect pRegions->dstSubresource.aspectMask, is an unrecognized enumerator");
- return false;
- }
+bool PreCmdResolveImage(VkCommandBuffer commandBuffer,
+ const VkImageResolve *pRegions) {
+ if (pRegions != nullptr) {
+ if ((pRegions->srcSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdResolveImage parameter, VkImageAspect "
+ "pRegions->srcSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
+ if ((pRegions->dstSubresource.aspectMask &
+ (VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT |
+ VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT)) ==
+ 0) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdResolveImage parameter, VkImageAspect "
+ "pRegions->dstSubresource.aspectMask, is an unrecognized "
+ "enumerator");
+ return false;
+ }
}
return true;
}
-bool PostCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount)
-{
-
+bool PostCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount) {
if (((srcImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdResolveImage parameter, VkImageLayout srcImageLayout, is an unrecognized enumerator");
+ (srcImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (srcImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdResolveImage parameter, VkImageLayout srcImageLayout, is "
+ "an unrecognized enumerator");
return false;
}
-
if (((dstImageLayout < VK_IMAGE_LAYOUT_BEGIN_RANGE) ||
- (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
- (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR))
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdResolveImage parameter, VkImageLayout dstImageLayout, is an unrecognized enumerator");
+ (dstImageLayout > VK_IMAGE_LAYOUT_END_RANGE)) &&
+ (dstImageLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdResolveImage parameter, VkImageLayout dstImageLayout, is "
+ "an unrecognized enumerator");
return false;
}
-
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(
- VkCommandBuffer commandBuffer,
- VkImage srcImage,
- VkImageLayout srcImageLayout,
- VkImage dstImage,
- VkImageLayout dstImageLayout,
- uint32_t regionCount,
- const VkImageResolve* pRegions)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
+ VkImageLayout srcImageLayout, VkImage dstImage,
+ VkImageLayout dstImageLayout, uint32_t regionCount,
+ const VkImageResolve *pRegions) {
PreCmdResolveImage(commandBuffer, pRegions);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage,
+ dstImageLayout, regionCount, pRegions);
- PostCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount);
+ PostCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage,
+ dstImageLayout, regionCount);
}
-bool PostCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask)
-{
-
-
+bool PostCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+ VkPipelineStageFlags stageMask) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdSetEvent(commandBuffer, event, stageMask);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+ VkPipelineStageFlags stageMask) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdSetEvent(commandBuffer, event, stageMask);
PostCmdSetEvent(commandBuffer, event, stageMask);
}
-bool PostCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask)
-{
-
-
+bool PostCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+ VkPipelineStageFlags stageMask) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(
- VkCommandBuffer commandBuffer,
- VkEvent event,
- VkPipelineStageFlags stageMask)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdResetEvent(commandBuffer, event, stageMask);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
+ VkPipelineStageFlags stageMask) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdResetEvent(commandBuffer, event, stageMask);
PostCmdResetEvent(commandBuffer, event, stageMask);
}
-bool PreCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- const VkEvent* pEvents,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier *pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier *pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier *pImageMemoryBarriers)
-{
- if(pEvents != nullptr)
- {
+bool PreCmdWaitEvents(VkCommandBuffer commandBuffer, const VkEvent *pEvents,
+ uint32_t memoryBarrierCount,
+ const VkMemoryBarrier *pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier *pImageMemoryBarriers) {
+ if (pEvents != nullptr) {
}
- if(pMemoryBarriers != nullptr)
- {
+ if (pMemoryBarriers != nullptr) {
}
return true;
}
-bool PostCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount)
-{
-
-
-
-
+bool PostCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount,
+ VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask,
+ uint32_t memoryBarrierCount) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(
- VkCommandBuffer commandBuffer,
- uint32_t eventCount,
- const VkEvent *pEvents,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier *pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier *pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier *pImageMemoryBarriers)
-{
- PreCmdWaitEvents(commandBuffer, pEvents, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount,
+ const VkEvent *pEvents, VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask,
+ uint32_t memoryBarrierCount,
+ const VkMemoryBarrier *pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier *pImageMemoryBarriers) {
+ PreCmdWaitEvents(commandBuffer, pEvents, memoryBarrierCount,
+ pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount,
+ pImageMemoryBarriers);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdWaitEvents(
+ commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- PostCmdWaitEvents(commandBuffer, eventCount, srcStageMask, dstStageMask, memoryBarrierCount);
+ PostCmdWaitEvents(commandBuffer, eventCount, srcStageMask, dstStageMask,
+ memoryBarrierCount);
}
-bool PreCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier *pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
- const VkBufferMemoryBarrier *pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier *pImageMemoryBarriers)
-{
- if(pMemoryBarriers != nullptr)
- {
+bool PreCmdPipelineBarrier(VkCommandBuffer commandBuffer,
+ uint32_t memoryBarrierCount,
+ const VkMemoryBarrier *pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
+ const VkBufferMemoryBarrier *pBufferMemoryBarriers,
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier *pImageMemoryBarriers) {
+ if (pMemoryBarriers != nullptr) {
}
return true;
}
-bool PostCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount)
-{
-
-
-
-
+bool PostCmdPipelineBarrier(VkCommandBuffer commandBuffer,
+ VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask,
+ VkDependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount) {
return true;
}
VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlags srcStageMask,
- VkPipelineStageFlags dstStageMask,
- VkDependencyFlags dependencyFlags,
- uint32_t memoryBarrierCount,
- const VkMemoryBarrier *pMemoryBarriers,
- uint32_t bufferMemoryBarrierCount,
+ VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
+ VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
+ uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
+ uint32_t bufferMemoryBarrierCount,
const VkBufferMemoryBarrier *pBufferMemoryBarriers,
- uint32_t imageMemoryBarrierCount,
- const VkImageMemoryBarrier *pImageMemoryBarriers)
-{
- PreCmdPipelineBarrier(commandBuffer, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ uint32_t imageMemoryBarrierCount,
+ const VkImageMemoryBarrier *pImageMemoryBarriers) {
+ PreCmdPipelineBarrier(commandBuffer, memoryBarrierCount, pMemoryBarriers,
+ bufferMemoryBarrierCount, pBufferMemoryBarriers,
+ imageMemoryBarrierCount, pImageMemoryBarriers);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdPipelineBarrier(
+ commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
+ memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
+ pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
- PostCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount);
+ PostCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask,
+ dependencyFlags, memoryBarrierCount);
}
-bool PostCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t slot,
- VkQueryControlFlags flags)
-{
-
-
-
+bool PostCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot, VkQueryControlFlags flags) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t slot,
- VkQueryControlFlags flags)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot, VkQueryControlFlags flags) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
PostCmdBeginQuery(commandBuffer, queryPool, slot, flags);
}
-bool PostCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t slot)
-{
-
-
+bool PostCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t slot)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdEndQuery(commandBuffer, queryPool, slot);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t slot) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdEndQuery(commandBuffer, queryPool, slot);
PostCmdEndQuery(commandBuffer, queryPool, slot);
}
-bool PostCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount)
-{
-
-
-
+bool PostCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t firstQuery, uint32_t queryCount) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
+ uint32_t firstQuery, uint32_t queryCount) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
PostCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
}
-bool PostCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t slot)
-{
+bool PostCmdWriteTimestamp(VkCommandBuffer commandBuffer,
+ VkPipelineStageFlagBits pipelineStage,
+ VkQueryPool queryPool, uint32_t slot) {
ValidateEnumerator(pipelineStage);
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(
- VkCommandBuffer commandBuffer,
- VkPipelineStageFlagBits pipelineStage,
- VkQueryPool queryPool,
- uint32_t slot)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdWriteTimestamp(VkCommandBuffer commandBuffer,
+ VkPipelineStageFlagBits pipelineStage,
+ VkQueryPool queryPool, uint32_t slot) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
PostCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
}
-bool PostCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags)
-{
-
-
-
-
-
-
-
+bool PostCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
+ VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize stride,
+ VkQueryResultFlags flags) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(
- VkCommandBuffer commandBuffer,
- VkQueryPool queryPool,
- uint32_t firstQuery,
- uint32_t queryCount,
- VkBuffer dstBuffer,
- VkDeviceSize dstOffset,
- VkDeviceSize stride,
- VkQueryResultFlags flags)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
+ VkQueryPool queryPool, uint32_t firstQuery,
+ uint32_t queryCount, VkBuffer dstBuffer,
+ VkDeviceSize dstOffset, VkDeviceSize stride,
+ VkQueryResultFlags flags) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery,
+ queryCount, dstBuffer, dstOffset, stride,
+ flags);
- PostCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+ PostCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery,
+ queryCount, dstBuffer, dstOffset, stride,
+ flags);
}
-bool PreCmdPushConstants(
- VkCommandBuffer commandBuffer,
- const void* pValues)
-{
- if(pValues != nullptr)
- {
+bool PreCmdPushConstants(VkCommandBuffer commandBuffer, const void *pValues) {
+ if (pValues != nullptr) {
}
return true;
}
-bool PostCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size)
-{
-
-
-
-
+bool PostCmdPushConstants(VkCommandBuffer commandBuffer,
+ VkPipelineLayout layout,
+ VkShaderStageFlags stageFlags, uint32_t offset,
+ uint32_t size) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(
- VkCommandBuffer commandBuffer,
- VkPipelineLayout layout,
- VkShaderStageFlags stageFlags,
- uint32_t offset,
- uint32_t size,
- const void* pValues)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
+ VkShaderStageFlags stageFlags, uint32_t offset,
+ uint32_t size, const void *pValues) {
PreCmdPushConstants(commandBuffer, pValues);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdPushConstants(
+ commandBuffer, layout, stageFlags, offset, size, pValues);
PostCmdPushConstants(commandBuffer, layout, stageFlags, offset, size);
}
-bool PreCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin)
-{
- if(pRenderPassBegin != nullptr)
- {
- if(pRenderPassBegin->sType != VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBeginRenderPass parameter, VkStructureType pRenderPassBegin->sType, is an invalid enumerator");
- return false;
- }
- if(pRenderPassBegin->pClearValues != nullptr)
- {
- }
+bool PreCmdBeginRenderPass(VkCommandBuffer commandBuffer,
+ const VkRenderPassBeginInfo *pRenderPassBegin) {
+ if (pRenderPassBegin != nullptr) {
+ if (pRenderPassBegin->sType !=
+ VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBeginRenderPass parameter, VkStructureType "
+ "pRenderPassBegin->sType, is an invalid enumerator");
+ return false;
+ }
+ if (pRenderPassBegin->pClearValues != nullptr) {
+ }
}
return true;
}
-bool PostCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents)
-{
+bool PostCmdBeginRenderPass(VkCommandBuffer commandBuffer,
+ VkSubpassContents contents) {
- if(contents < VK_SUBPASS_CONTENTS_BEGIN_RANGE ||
- contents > VK_SUBPASS_CONTENTS_END_RANGE)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdBeginRenderPass parameter, VkSubpassContents contents, is an unrecognized enumerator");
+ if (contents < VK_SUBPASS_CONTENTS_BEGIN_RANGE ||
+ contents > VK_SUBPASS_CONTENTS_END_RANGE) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdBeginRenderPass parameter, VkSubpassContents contents, "
+ "is an unrecognized enumerator");
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(
- VkCommandBuffer commandBuffer,
- const VkRenderPassBeginInfo* pRenderPassBegin,
- VkSubpassContents contents)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,
+ const VkRenderPassBeginInfo *pRenderPassBegin,
+ VkSubpassContents contents) {
PreCmdBeginRenderPass(commandBuffer, pRenderPassBegin);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
PostCmdBeginRenderPass(commandBuffer, contents);
}
-bool PostCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents)
-{
+bool PostCmdNextSubpass(VkCommandBuffer commandBuffer,
+ VkSubpassContents contents) {
- if(contents < VK_SUBPASS_CONTENTS_BEGIN_RANGE ||
- contents > VK_SUBPASS_CONTENTS_END_RANGE)
- {
- log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
- "vkCmdNextSubpass parameter, VkSubpassContents contents, is an unrecognized enumerator");
+ if (contents < VK_SUBPASS_CONTENTS_BEGIN_RANGE ||
+ contents > VK_SUBPASS_CONTENTS_END_RANGE) {
+ log_msg(mdd(commandBuffer), VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, "PARAMCHECK",
+ "vkCmdNextSubpass parameter, VkSubpassContents contents, is an "
+ "unrecognized enumerator");
return false;
}
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(
- VkCommandBuffer commandBuffer,
- VkSubpassContents contents)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdNextSubpass(commandBuffer, contents);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdNextSubpass(VkCommandBuffer commandBuffer,
+ VkSubpassContents contents) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdNextSubpass(commandBuffer, contents);
PostCmdNextSubpass(commandBuffer, contents);
}
-bool PostCmdEndRenderPass(
- VkCommandBuffer commandBuffer)
-{
+bool PostCmdEndRenderPass(VkCommandBuffer commandBuffer) { return true; }
- return true;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(
- VkCommandBuffer commandBuffer)
-{
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdEndRenderPass(commandBuffer);
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdEndRenderPass(VkCommandBuffer commandBuffer) {
+ get_dispatch_table(pc_device_table_map, commandBuffer)
+ ->CmdEndRenderPass(commandBuffer);
PostCmdEndRenderPass(commandBuffer);
}
-bool PreCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- const VkCommandBuffer* pCommandBuffers)
-{
- if(pCommandBuffers != nullptr)
- {
+bool PreCmdExecuteCommands(VkCommandBuffer commandBuffer,
+ const VkCommandBuffer *pCommandBuffers) {
+ if (pCommandBuffers != nullptr) {
}
return true;
}
-bool PostCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBuffersCount)
-{
-
+bool PostCmdExecuteCommands(VkCommandBuffer commandBuffer,
+ uint32_t commandBuffersCount) {
return true;
}
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(
- VkCommandBuffer commandBuffer,
- uint32_t commandBuffersCount,
- const VkCommandBuffer* pCommandBuffers)
-{
+VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkCmdExecuteCommands(VkCommandBuffer commandBuffer,
+ uint32_t commandBuffersCount,
+ const VkCommandBuffer *pCommandBuffers) {
PreCmdExecuteCommands(commandBuffer, pCommandBuffers);
- get_dispatch_table(pc_device_table_map, commandBuffer)->CmdExecuteCommands(commandBuffer, commandBuffersCount, pCommandBuffers);
+ get_dispatch_table(pc_device_table_map, commandBuffer)->CmdExecuteCommands(
+ commandBuffer, commandBuffersCount, pCommandBuffers);
PostCmdExecuteCommands(commandBuffer, commandBuffersCount);
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetDeviceProcAddr(VkDevice device, const char *funcName) {
if (!strcmp(funcName, "vkGetDeviceProcAddr"))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
if (!strcmp(funcName, "vkDestroyDevice"))
- return (PFN_vkVoidFunction) vkDestroyDevice;
+ return (PFN_vkVoidFunction)vkDestroyDevice;
if (!strcmp(funcName, "vkGetDeviceQueue"))
- return (PFN_vkVoidFunction) vkGetDeviceQueue;
+ return (PFN_vkVoidFunction)vkGetDeviceQueue;
if (!strcmp(funcName, "vkQueueSubmit"))
- return (PFN_vkVoidFunction) vkQueueSubmit;
+ return (PFN_vkVoidFunction)vkQueueSubmit;
if (!strcmp(funcName, "vkQueueWaitIdle"))
- return (PFN_vkVoidFunction) vkQueueWaitIdle;
+ return (PFN_vkVoidFunction)vkQueueWaitIdle;
if (!strcmp(funcName, "vkDeviceWaitIdle"))
- return (PFN_vkVoidFunction) vkDeviceWaitIdle;
+ return (PFN_vkVoidFunction)vkDeviceWaitIdle;
if (!strcmp(funcName, "vkAllocateMemory"))
- return (PFN_vkVoidFunction) vkAllocateMemory;
+ return (PFN_vkVoidFunction)vkAllocateMemory;
if (!strcmp(funcName, "vkMapMemory"))
- return (PFN_vkVoidFunction) vkMapMemory;
+ return (PFN_vkVoidFunction)vkMapMemory;
if (!strcmp(funcName, "vkFlushMappedMemoryRanges"))
- return (PFN_vkVoidFunction) vkFlushMappedMemoryRanges;
+ return (PFN_vkVoidFunction)vkFlushMappedMemoryRanges;
if (!strcmp(funcName, "vkInvalidateMappedMemoryRanges"))
- return (PFN_vkVoidFunction) vkInvalidateMappedMemoryRanges;
+ return (PFN_vkVoidFunction)vkInvalidateMappedMemoryRanges;
if (!strcmp(funcName, "vkCreateFence"))
- return (PFN_vkVoidFunction) vkCreateFence;
+ return (PFN_vkVoidFunction)vkCreateFence;
if (!strcmp(funcName, "vkResetFences"))
- return (PFN_vkVoidFunction) vkResetFences;
+ return (PFN_vkVoidFunction)vkResetFences;
if (!strcmp(funcName, "vkGetFenceStatus"))
- return (PFN_vkVoidFunction) vkGetFenceStatus;
+ return (PFN_vkVoidFunction)vkGetFenceStatus;
if (!strcmp(funcName, "vkWaitForFences"))
- return (PFN_vkVoidFunction) vkWaitForFences;
+ return (PFN_vkVoidFunction)vkWaitForFences;
if (!strcmp(funcName, "vkCreateSemaphore"))
- return (PFN_vkVoidFunction) vkCreateSemaphore;
+ return (PFN_vkVoidFunction)vkCreateSemaphore;
if (!strcmp(funcName, "vkCreateEvent"))
- return (PFN_vkVoidFunction) vkCreateEvent;
+ return (PFN_vkVoidFunction)vkCreateEvent;
if (!strcmp(funcName, "vkGetEventStatus"))
- return (PFN_vkVoidFunction) vkGetEventStatus;
+ return (PFN_vkVoidFunction)vkGetEventStatus;
if (!strcmp(funcName, "vkSetEvent"))
- return (PFN_vkVoidFunction) vkSetEvent;
+ return (PFN_vkVoidFunction)vkSetEvent;
if (!strcmp(funcName, "vkResetEvent"))
- return (PFN_vkVoidFunction) vkResetEvent;
+ return (PFN_vkVoidFunction)vkResetEvent;
if (!strcmp(funcName, "vkCreateQueryPool"))
- return (PFN_vkVoidFunction) vkCreateQueryPool;
+ return (PFN_vkVoidFunction)vkCreateQueryPool;
if (!strcmp(funcName, "vkGetQueryPoolResults"))
- return (PFN_vkVoidFunction) vkGetQueryPoolResults;
+ return (PFN_vkVoidFunction)vkGetQueryPoolResults;
if (!strcmp(funcName, "vkCreateBuffer"))
- return (PFN_vkVoidFunction) vkCreateBuffer;
+ return (PFN_vkVoidFunction)vkCreateBuffer;
if (!strcmp(funcName, "vkCreateBufferView"))
- return (PFN_vkVoidFunction) vkCreateBufferView;
+ return (PFN_vkVoidFunction)vkCreateBufferView;
if (!strcmp(funcName, "vkCreateImage"))
- return (PFN_vkVoidFunction) vkCreateImage;
+ return (PFN_vkVoidFunction)vkCreateImage;
if (!strcmp(funcName, "vkGetImageSubresourceLayout"))
- return (PFN_vkVoidFunction) vkGetImageSubresourceLayout;
+ return (PFN_vkVoidFunction)vkGetImageSubresourceLayout;
if (!strcmp(funcName, "vkCreateImageView"))
- return (PFN_vkVoidFunction) vkCreateImageView;
+ return (PFN_vkVoidFunction)vkCreateImageView;
if (!strcmp(funcName, "vkCreateShaderModule"))
- return (PFN_vkVoidFunction) vkCreateShaderModule;
+ return (PFN_vkVoidFunction)vkCreateShaderModule;
if (!strcmp(funcName, "vkCreateGraphicsPipelines"))
- return (PFN_vkVoidFunction) vkCreateGraphicsPipelines;
+ return (PFN_vkVoidFunction)vkCreateGraphicsPipelines;
if (!strcmp(funcName, "vkCreateComputePipelines"))
- return (PFN_vkVoidFunction) vkCreateComputePipelines;
+ return (PFN_vkVoidFunction)vkCreateComputePipelines;
if (!strcmp(funcName, "vkCreatePipelineLayout"))
- return (PFN_vkVoidFunction) vkCreatePipelineLayout;
+ return (PFN_vkVoidFunction)vkCreatePipelineLayout;
if (!strcmp(funcName, "vkCreateSampler"))
- return (PFN_vkVoidFunction) vkCreateSampler;
+ return (PFN_vkVoidFunction)vkCreateSampler;
if (!strcmp(funcName, "vkCreateDescriptorSetLayout"))
- return (PFN_vkVoidFunction) vkCreateDescriptorSetLayout;
+ return (PFN_vkVoidFunction)vkCreateDescriptorSetLayout;
if (!strcmp(funcName, "vkCreateDescriptorPool"))
- return (PFN_vkVoidFunction) vkCreateDescriptorPool;
+ return (PFN_vkVoidFunction)vkCreateDescriptorPool;
if (!strcmp(funcName, "vkResetDescriptorPool"))
- return (PFN_vkVoidFunction) vkResetDescriptorPool;
+ return (PFN_vkVoidFunction)vkResetDescriptorPool;
if (!strcmp(funcName, "vkAllocateDescriptorSets"))
- return (PFN_vkVoidFunction) vkAllocateDescriptorSets;
+ return (PFN_vkVoidFunction)vkAllocateDescriptorSets;
if (!strcmp(funcName, "vkCmdSetViewport"))
- return (PFN_vkVoidFunction) vkCmdSetViewport;
+ return (PFN_vkVoidFunction)vkCmdSetViewport;
if (!strcmp(funcName, "vkCmdSetScissor"))
- return (PFN_vkVoidFunction) vkCmdSetScissor;
+ return (PFN_vkVoidFunction)vkCmdSetScissor;
if (!strcmp(funcName, "vkCmdSetLineWidth"))
- return (PFN_vkVoidFunction) vkCmdSetLineWidth;
+ return (PFN_vkVoidFunction)vkCmdSetLineWidth;
if (!strcmp(funcName, "vkCmdSetDepthBias"))
- return (PFN_vkVoidFunction) vkCmdSetDepthBias;
+ return (PFN_vkVoidFunction)vkCmdSetDepthBias;
if (!strcmp(funcName, "vkCmdSetBlendConstants"))
- return (PFN_vkVoidFunction) vkCmdSetBlendConstants;
+ return (PFN_vkVoidFunction)vkCmdSetBlendConstants;
if (!strcmp(funcName, "vkCmdSetDepthBounds"))
- return (PFN_vkVoidFunction) vkCmdSetDepthBounds;
+ return (PFN_vkVoidFunction)vkCmdSetDepthBounds;
if (!strcmp(funcName, "vkCmdSetStencilCompareMask"))
- return (PFN_vkVoidFunction) vkCmdSetStencilCompareMask;
+ return (PFN_vkVoidFunction)vkCmdSetStencilCompareMask;
if (!strcmp(funcName, "vkCmdSetStencilWriteMask"))
- return (PFN_vkVoidFunction) vkCmdSetStencilWriteMask;
+ return (PFN_vkVoidFunction)vkCmdSetStencilWriteMask;
if (!strcmp(funcName, "vkCmdSetStencilReference"))
- return (PFN_vkVoidFunction) vkCmdSetStencilReference;
+ return (PFN_vkVoidFunction)vkCmdSetStencilReference;
if (!strcmp(funcName, "vkAllocateCommandBuffers"))
- return (PFN_vkVoidFunction) vkAllocateCommandBuffers;
+ return (PFN_vkVoidFunction)vkAllocateCommandBuffers;
if (!strcmp(funcName, "vkBeginCommandBuffer"))
- return (PFN_vkVoidFunction) vkBeginCommandBuffer;
+ return (PFN_vkVoidFunction)vkBeginCommandBuffer;
if (!strcmp(funcName, "vkEndCommandBuffer"))
- return (PFN_vkVoidFunction) vkEndCommandBuffer;
+ return (PFN_vkVoidFunction)vkEndCommandBuffer;
if (!strcmp(funcName, "vkResetCommandBuffer"))
- return (PFN_vkVoidFunction) vkResetCommandBuffer;
+ return (PFN_vkVoidFunction)vkResetCommandBuffer;
if (!strcmp(funcName, "vkCmdBindPipeline"))
- return (PFN_vkVoidFunction) vkCmdBindPipeline;
+ return (PFN_vkVoidFunction)vkCmdBindPipeline;
if (!strcmp(funcName, "vkCmdBindDescriptorSets"))
- return (PFN_vkVoidFunction) vkCmdBindDescriptorSets;
+ return (PFN_vkVoidFunction)vkCmdBindDescriptorSets;
if (!strcmp(funcName, "vkCmdBindVertexBuffers"))
- return (PFN_vkVoidFunction) vkCmdBindVertexBuffers;
+ return (PFN_vkVoidFunction)vkCmdBindVertexBuffers;
if (!strcmp(funcName, "vkCmdBindIndexBuffer"))
- return (PFN_vkVoidFunction) vkCmdBindIndexBuffer;
+ return (PFN_vkVoidFunction)vkCmdBindIndexBuffer;
if (!strcmp(funcName, "vkCmdDraw"))
- return (PFN_vkVoidFunction) vkCmdDraw;
+ return (PFN_vkVoidFunction)vkCmdDraw;
if (!strcmp(funcName, "vkCmdDrawIndexed"))
- return (PFN_vkVoidFunction) vkCmdDrawIndexed;
+ return (PFN_vkVoidFunction)vkCmdDrawIndexed;
if (!strcmp(funcName, "vkCmdDrawIndirect"))
- return (PFN_vkVoidFunction) vkCmdDrawIndirect;
+ return (PFN_vkVoidFunction)vkCmdDrawIndirect;
if (!strcmp(funcName, "vkCmdDrawIndexedIndirect"))
- return (PFN_vkVoidFunction) vkCmdDrawIndexedIndirect;
+ return (PFN_vkVoidFunction)vkCmdDrawIndexedIndirect;
if (!strcmp(funcName, "vkCmdDispatch"))
- return (PFN_vkVoidFunction) vkCmdDispatch;
+ return (PFN_vkVoidFunction)vkCmdDispatch;
if (!strcmp(funcName, "vkCmdDispatchIndirect"))
- return (PFN_vkVoidFunction) vkCmdDispatchIndirect;
+ return (PFN_vkVoidFunction)vkCmdDispatchIndirect;
if (!strcmp(funcName, "vkCmdCopyBuffer"))
- return (PFN_vkVoidFunction) vkCmdCopyBuffer;
+ return (PFN_vkVoidFunction)vkCmdCopyBuffer;
if (!strcmp(funcName, "vkCmdCopyImage"))
- return (PFN_vkVoidFunction) vkCmdCopyImage;
+ return (PFN_vkVoidFunction)vkCmdCopyImage;
if (!strcmp(funcName, "vkCmdBlitImage"))
- return (PFN_vkVoidFunction) vkCmdBlitImage;
+ return (PFN_vkVoidFunction)vkCmdBlitImage;
if (!strcmp(funcName, "vkCmdCopyBufferToImage"))
- return (PFN_vkVoidFunction) vkCmdCopyBufferToImage;
+ return (PFN_vkVoidFunction)vkCmdCopyBufferToImage;
if (!strcmp(funcName, "vkCmdCopyImageToBuffer"))
- return (PFN_vkVoidFunction) vkCmdCopyImageToBuffer;
+ return (PFN_vkVoidFunction)vkCmdCopyImageToBuffer;
if (!strcmp(funcName, "vkCmdUpdateBuffer"))
- return (PFN_vkVoidFunction) vkCmdUpdateBuffer;
+ return (PFN_vkVoidFunction)vkCmdUpdateBuffer;
if (!strcmp(funcName, "vkCmdFillBuffer"))
- return (PFN_vkVoidFunction) vkCmdFillBuffer;
+ return (PFN_vkVoidFunction)vkCmdFillBuffer;
if (!strcmp(funcName, "vkCmdClearColorImage"))
- return (PFN_vkVoidFunction) vkCmdClearColorImage;
+ return (PFN_vkVoidFunction)vkCmdClearColorImage;
if (!strcmp(funcName, "vkCmdResolveImage"))
- return (PFN_vkVoidFunction) vkCmdResolveImage;
+ return (PFN_vkVoidFunction)vkCmdResolveImage;
if (!strcmp(funcName, "vkCmdSetEvent"))
- return (PFN_vkVoidFunction) vkCmdSetEvent;
+ return (PFN_vkVoidFunction)vkCmdSetEvent;
if (!strcmp(funcName, "vkCmdResetEvent"))
- return (PFN_vkVoidFunction) vkCmdResetEvent;
+ return (PFN_vkVoidFunction)vkCmdResetEvent;
if (!strcmp(funcName, "vkCmdWaitEvents"))
- return (PFN_vkVoidFunction) vkCmdWaitEvents;
+ return (PFN_vkVoidFunction)vkCmdWaitEvents;
if (!strcmp(funcName, "vkCmdPipelineBarrier"))
- return (PFN_vkVoidFunction) vkCmdPipelineBarrier;
+ return (PFN_vkVoidFunction)vkCmdPipelineBarrier;
if (!strcmp(funcName, "vkCmdBeginQuery"))
- return (PFN_vkVoidFunction) vkCmdBeginQuery;
+ return (PFN_vkVoidFunction)vkCmdBeginQuery;
if (!strcmp(funcName, "vkCmdEndQuery"))
- return (PFN_vkVoidFunction) vkCmdEndQuery;
+ return (PFN_vkVoidFunction)vkCmdEndQuery;
if (!strcmp(funcName, "vkCmdResetQueryPool"))
- return (PFN_vkVoidFunction) vkCmdResetQueryPool;
+ return (PFN_vkVoidFunction)vkCmdResetQueryPool;
if (!strcmp(funcName, "vkCmdWriteTimestamp"))
- return (PFN_vkVoidFunction) vkCmdWriteTimestamp;
+ return (PFN_vkVoidFunction)vkCmdWriteTimestamp;
if (!strcmp(funcName, "vkCmdCopyQueryPoolResults"))
- return (PFN_vkVoidFunction) vkCmdCopyQueryPoolResults;
+ return (PFN_vkVoidFunction)vkCmdCopyQueryPoolResults;
if (!strcmp(funcName, "vkCreateFramebuffer"))
- return (PFN_vkVoidFunction) vkCreateFramebuffer;
+ return (PFN_vkVoidFunction)vkCreateFramebuffer;
if (!strcmp(funcName, "vkCreateRenderPass"))
- return (PFN_vkVoidFunction) vkCreateRenderPass;
+ return (PFN_vkVoidFunction)vkCreateRenderPass;
if (!strcmp(funcName, "vkCmdBeginRenderPass"))
- return (PFN_vkVoidFunction) vkCmdBeginRenderPass;
+ return (PFN_vkVoidFunction)vkCmdBeginRenderPass;
if (!strcmp(funcName, "vkCmdNextSubpass"))
- return (PFN_vkVoidFunction) vkCmdNextSubpass;
+ return (PFN_vkVoidFunction)vkCmdNextSubpass;
if (device == NULL) {
return NULL;
}
- if (get_dispatch_table(pc_device_table_map, device)->GetDeviceProcAddr == NULL)
+ if (get_dispatch_table(pc_device_table_map, device)->GetDeviceProcAddr ==
+ NULL)
return NULL;
- return get_dispatch_table(pc_device_table_map, device)->GetDeviceProcAddr(device, funcName);
+ return get_dispatch_table(pc_device_table_map, device)
+ ->GetDeviceProcAddr(device, funcName);
}
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
-{
+VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetInstanceProcAddr(VkInstance instance, const char *funcName) {
if (!strcmp(funcName, "vkGetInstanceProcAddr"))
- return (PFN_vkVoidFunction) vkGetInstanceProcAddr;
+ return (PFN_vkVoidFunction)vkGetInstanceProcAddr;
if (!strcmp(funcName, "vkCreateInstance"))
- return (PFN_vkVoidFunction) vkCreateInstance;
+ return (PFN_vkVoidFunction)vkCreateInstance;
if (!strcmp(funcName, "vkDestroyInstance"))
- return (PFN_vkVoidFunction) vkDestroyInstance;
+ return (PFN_vkVoidFunction)vkDestroyInstance;
if (!strcmp(funcName, "vkCreateDevice"))
- return (PFN_vkVoidFunction) vkCreateDevice;
+ return (PFN_vkVoidFunction)vkCreateDevice;
if (!strcmp(funcName, "vkEnumeratePhysicalDevices"))
- return (PFN_vkVoidFunction) vkEnumeratePhysicalDevices;
+ return (PFN_vkVoidFunction)vkEnumeratePhysicalDevices;
if (!strcmp(funcName, "vkGetPhysicalDeviceProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceProperties;
if (!strcmp(funcName, "vkGetPhysicalDeviceFeatures"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceFeatures;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceFeatures;
if (!strcmp(funcName, "vkGetPhysicalDeviceFormatProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceFormatProperties;
+ return (PFN_vkVoidFunction)vkGetPhysicalDeviceFormatProperties;
if (!strcmp(funcName, "vkEnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceLayerProperties;
if (!strcmp(funcName, "vkEnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateInstanceExtensionProperties;
if (!strcmp(funcName, "vkEnumerateDeviceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceLayerProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceLayerProperties;
if (!strcmp(funcName, "vkEnumerateDeviceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateDeviceExtensionProperties;
+ return (PFN_vkVoidFunction)vkEnumerateDeviceExtensionProperties;
if (instance == NULL) {
return NULL;
}
- layer_data *data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- PFN_vkVoidFunction fptr = debug_report_get_instance_proc_addr(data->report_data, funcName);
- if(fptr)
+ layer_data *data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ PFN_vkVoidFunction fptr =
+ debug_report_get_instance_proc_addr(data->report_data, funcName);
+ if (fptr)
return fptr;
- if (get_dispatch_table(pc_instance_table_map, instance)->GetInstanceProcAddr == NULL)
+ if (get_dispatch_table(pc_instance_table_map, instance)
+ ->GetInstanceProcAddr == NULL)
return NULL;
- return get_dispatch_table(pc_instance_table_map, instance)->GetInstanceProcAddr(instance, funcName);
+ return get_dispatch_table(pc_instance_table_map, instance)
+ ->GetInstanceProcAddr(instance, funcName);
}
diff --git a/layers/swapchain.cpp b/layers/swapchain.cpp
index e9472e2..23f1466 100644
--- a/layers/swapchain.cpp
+++ b/layers/swapchain.cpp
@@ -40,72 +40,75 @@
// FIXME/TODO: Make sure this layer is thread-safe!
-
// The following is for logging error messages:
static std::unordered_map<void *, layer_data *> layer_data_map;
-template layer_data *get_my_data_ptr<layer_data>(
- void *data_key,
- std::unordered_map<void *, layer_data *> &data_map);
+template layer_data *
+get_my_data_ptr<layer_data>(void *data_key,
+ std::unordered_map<void *, layer_data *> &data_map);
static const VkExtensionProperties instance_extensions[] = {
- {
- VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
- VK_EXT_DEBUG_REPORT_SPEC_VERSION
- }
-};
+ {VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
- const char *pLayerName,
- uint32_t *pCount,
- VkExtensionProperties* pProperties)
-{
- return util_GetExtensionProperties(1, instance_extensions, pCount, pProperties);
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceExtensionProperties(const char *pLayerName,
+ uint32_t *pCount,
+ VkExtensionProperties *pProperties) {
+ return util_GetExtensionProperties(1, instance_extensions, pCount,
+ pProperties);
}
-static const VkLayerProperties swapchain_global_layers[] = {
- {
- "swapchain",
- VK_API_VERSION,
- VK_MAKE_VERSION(0, 1, 0),
- "Validation layer: swapchain",
- }
-};
+static const VkLayerProperties swapchain_global_layers[] = {{
+ "swapchain", VK_API_VERSION, VK_MAKE_VERSION(0, 1, 0),
+ "Validation layer: swapchain",
+}};
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
- uint32_t *pCount,
- VkLayerProperties* pProperties)
-{
+VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumerateInstanceLayerProperties(uint32_t *pCount,
+ VkLayerProperties *pProperties) {
return util_GetLayerProperties(ARRAY_SIZE(swapchain_global_layers),
- swapchain_global_layers,
- pCount, pProperties);
+ swapchain_global_layers, pCount,
+ pProperties);
}
-static void createDeviceRegisterExtensions(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, VkDevice device)
-{
+static void
+createDeviceRegisterExtensions(VkPhysicalDevice physicalDevice,
+ const VkDeviceCreateInfo *pCreateInfo,
+ VkDevice device) {
uint32_t i;
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ layer_data *my_instance_data =
+ get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- VkLayerDispatchTable *pDisp = my_device_data->device_dispatch_table;
- PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
+ VkLayerDispatchTable *pDisp = my_device_data->device_dispatch_table;
+ PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
- pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
- pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
- pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
- pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
- pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
- pDisp->GetDeviceQueue = (PFN_vkGetDeviceQueue) gpa(device, "vkGetDeviceQueue");
+ pDisp->CreateSwapchainKHR =
+ (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
+ pDisp->DestroySwapchainKHR =
+ (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
+ pDisp->GetSwapchainImagesKHR =
+ (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
+ pDisp->AcquireNextImageKHR =
+ (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
+ pDisp->QueuePresentKHR =
+ (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
+ pDisp->GetDeviceQueue =
+ (PFN_vkGetDeviceQueue)gpa(device, "vkGetDeviceQueue");
- SwpPhysicalDevice *pPhysicalDevice = &my_instance_data->physicalDeviceMap[physicalDevice];
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_instance_data->physicalDeviceMap[physicalDevice];
if (pPhysicalDevice) {
my_device_data->deviceMap[device].pPhysicalDevice = pPhysicalDevice;
pPhysicalDevice->pDevice = &my_device_data->deviceMap[device];
} else {
// TBD: Should we leave error in (since Swapchain really needs this
// link)?
- log_msg(my_instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- (uint64_t)physicalDevice , __LINE__, SWAPCHAIN_INVALID_HANDLE, "Swapchain",
+ log_msg(my_instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ (uint64_t)physicalDevice, __LINE__, SWAPCHAIN_INVALID_HANDLE,
+ "Swapchain",
"vkCreateDevice() called with a non-valid VkPhysicalDevice.");
}
my_device_data->deviceMap[device].device = device;
@@ -115,47 +118,75 @@
// No need to check if the extension was advertised by
// vkEnumerateDeviceExtensionProperties(), since the loader handles that.
for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0) {
my_device_data->deviceMap[device].swapchainExtensionEnabled = true;
}
}
}
-static void createInstanceRegisterExtensions(const VkInstanceCreateInfo* pCreateInfo, VkInstance instance)
-{
+static void
+createInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo,
+ VkInstance instance) {
uint32_t i;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- VkLayerInstanceDispatchTable *pDisp = my_data->instance_dispatch_table;
+ layer_data *my_data =
+ get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
+ VkLayerInstanceDispatchTable *pDisp = my_data->instance_dispatch_table;
PFN_vkGetInstanceProcAddr gpa = pDisp->GetInstanceProcAddr;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
+ pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR)gpa(
+ instance, "vkCreateAndroidSurfaceKHR");
#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
- pDisp->CreateMirSurfaceKHR = (PFN_vkCreateMirSurfaceKHR) gpa(instance, "vkCreateMirSurfaceKHR");
- pDisp->GetPhysicalDeviceMirPresentationSupportKHR = (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
+ pDisp->CreateMirSurfaceKHR =
+ (PFN_vkCreateMirSurfaceKHR)gpa(instance, "vkCreateMirSurfaceKHR");
+ pDisp->GetPhysicalDeviceMirPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
- pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
+ pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR)gpa(
+ instance, "vkCreateWaylandSurfaceKHR");
+ pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
- pDisp->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
- pDisp->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
+ pDisp->CreateWin32SurfaceKHR =
+ (PFN_vkCreateWin32SurfaceKHR)gpa(instance, "vkCreateWin32SurfaceKHR");
+ pDisp->GetPhysicalDeviceWin32PresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
- pDisp->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
- pDisp->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
+ pDisp->CreateXcbSurfaceKHR =
+ (PFN_vkCreateXcbSurfaceKHR)gpa(instance, "vkCreateXcbSurfaceKHR");
+ pDisp->GetPhysicalDeviceXcbPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
- pDisp->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
- pDisp->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
+ pDisp->CreateXlibSurfaceKHR =
+ (PFN_vkCreateXlibSurfaceKHR)gpa(instance, "vkCreateXlibSurfaceKHR");
+ pDisp->GetPhysicalDeviceXlibPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XLIB_KHR
- pDisp->DestroySurfaceKHR = (PFN_vkDestroySurfaceKHR) gpa(instance, "vkDestroySurfaceKHR");
- pDisp->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
- pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
- pDisp->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
- pDisp->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
+ pDisp->DestroySurfaceKHR =
+ (PFN_vkDestroySurfaceKHR)gpa(instance, "vkDestroySurfaceKHR");
+ pDisp->GetPhysicalDeviceSurfaceSupportKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
+ pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
+ pDisp->GetPhysicalDeviceSurfaceFormatsKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
+ pDisp->GetPhysicalDeviceSurfacePresentModesKHR =
+ (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
// Remember this instance, and whether the VK_KHR_surface extension
// was enabled for it:
@@ -180,2217 +211,2791 @@
my_data->instanceMap[instance].xlibSurfaceExtensionEnabled = false;
#endif // VK_USE_PLATFORM_XLIB_KHR
-
// Record whether the WSI instance extension was enabled for this
// VkInstance. No need to check if the extension was advertised by
// vkEnumerateInstanceExtensionProperties(), since the loader handles that.
for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SURFACE_EXTENSION_NAME) == 0) {
my_data->instanceMap[instance].surfaceExtensionEnabled = true;
}
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0) {
- my_data->instanceMap[instance].androidSurfaceExtensionEnabled = true;
+ my_data->instanceMap[instance].androidSurfaceExtensionEnabled =
+ true;
#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0) {
- my_data->instanceMap[instance].mirSurfaceExtensionEnabled = true;
+ my_data->instanceMap[instance].mirSurfaceExtensionEnabled =
+ true;
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0) {
- my_data->instanceMap[instance].waylandSurfaceExtensionEnabled = true;
+ my_data->instanceMap[instance]
+ .waylandSurfaceExtensionEnabled = true;
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0) {
- my_data->instanceMap[instance].win32SurfaceExtensionEnabled = true;
+ my_data->instanceMap[instance]
+ .win32SurfaceExtensionEnabled = true;
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0) {
- my_data->instanceMap[instance].xcbSurfaceExtensionEnabled = true;
+ my_data->instanceMap[instance]
+ .xcbSurfaceExtensionEnabled = true;
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0) {
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_XLIB_SURFACE_EXTENSION_NAME) ==
+ 0) {
- my_data->instanceMap[instance].xlibSurfaceExtensionEnabled = true;
+ my_data->instanceMap[instance]
+ .xlibSurfaceExtensionEnabled = true;
#endif // VK_USE_PLATFORM_XLIB_KHR
- }
- }
-}
-
-
-#include "vk_dispatch_table_helper.h"
-static void initSwapchain(layer_data *my_data, const VkAllocationCallbacks *pAllocator)
-{
- uint32_t report_flags = 0;
- uint32_t debug_action = 0;
- FILE *log_output = NULL;
- const char *option_str;
- VkDebugReportCallbackEXT callback;
-
- // Initialize Swapchain options:
- report_flags = getLayerOptionFlags("SwapchainReportFlags", 0);
- getLayerOptionEnum("SwapchainDebugAction", (uint32_t *) &debug_action);
-
- if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG)
- {
- // Turn on logging, since it was requested:
- option_str = getLayerOption("SwapchainLogFilename");
- log_output = getLayerLogOutput(option_str, "Swapchain");
- VkDebugReportCallbackCreateInfoEXT dbgInfo;
- memset(&dbgInfo, 0, sizeof(dbgInfo));
- dbgInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
- dbgInfo.pfnCallback = log_callback;
- dbgInfo.pUserData = log_output;
- dbgInfo.flags = report_flags;
- layer_create_msg_callback(my_data->report_data,
- &dbgInfo,
- pAllocator,
- &callback);
- my_data->logging_callback.push_back(callback);
- }
- if (debug_action & VK_DBG_LAYER_ACTION_DEBUG_OUTPUT) {
- VkDebugReportCallbackCreateInfoEXT dbgInfo;
- memset(&dbgInfo, 0, sizeof(dbgInfo));
- dbgInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
- dbgInfo.pfnCallback = win32_debug_output_msg;
- dbgInfo.pUserData = log_output;
- dbgInfo.flags = report_flags;
- layer_create_msg_callback(my_data->report_data, &dbgInfo, pAllocator, &callback);
- my_data->logging_callback.push_back(callback);
- }
-}
-
-static const char *surfaceTransformStr(VkSurfaceTransformFlagBitsKHR value)
-{
- // Return a string corresponding to the value:
- return string_VkSurfaceTransformFlagBitsKHR(value);
-}
-
-static const char *surfaceCompositeAlphaStr(VkCompositeAlphaFlagBitsKHR value)
-{
- // Return a string corresponding to the value:
- return string_VkCompositeAlphaFlagBitsKHR(value);
-}
-
-static const char *presentModeStr(VkPresentModeKHR value)
-{
- // Return a string corresponding to the value:
- return string_VkPresentModeKHR(value);
-}
-
-static const char *sharingModeStr(VkSharingMode value)
-{
- // Return a string corresponding to the value:
- return string_VkSharingMode(value);
-}
-
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
-
- assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
- if (fpCreateInstance == NULL) {
- return VK_ERROR_INITIALIZATION_FAILED;
- }
-
- // Advance the link info for the next element on the chain
- chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
-
- VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
- if (result != VK_SUCCESS) {
- return result;
- }
-
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
- my_data->instance_dispatch_table = new VkLayerInstanceDispatchTable;
- layer_init_instance_dispatch_table(*pInstance, my_data->instance_dispatch_table, fpGetInstanceProcAddr);
-
- my_data->report_data = debug_report_create_instance(
- my_data->instance_dispatch_table,
- *pInstance,
- pCreateInfo->enabledExtensionCount,
- pCreateInfo->ppEnabledExtensionNames);
-
- // Call the following function after my_data is initialized:
- createInstanceRegisterExtensions(pCreateInfo, *pInstance);
- initSwapchain(my_data, pAllocator);
-
- return result;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks* pAllocator)
-{
- VkBool32 skipCall = VK_FALSE;
- dispatch_key key = get_dispatch_key(instance);
- layer_data *my_data = get_my_data_ptr(key, layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- my_data->instance_dispatch_table->DestroyInstance(instance, pAllocator);
-
- // Clean up logging callback, if any
- while (my_data->logging_callback.size() > 0) {
- VkDebugReportCallbackEXT callback = my_data->logging_callback.back();
- layer_destroy_msg_callback(my_data->report_data, callback, pAllocator);
- my_data->logging_callback.pop_back();
- }
- layer_debug_report_destroy_instance(my_data->report_data);
- }
-
- // Regardless of skipCall value, do some internal cleanup:
- if (pInstance) {
- // Delete all of the SwpPhysicalDevice's, SwpSurface's, and the
- // SwpInstance associated with this instance:
- for (auto it = pInstance->physicalDevices.begin() ;
- it != pInstance->physicalDevices.end() ; it++) {
-
- // Free memory that was allocated for/by this SwpPhysicalDevice:
- SwpPhysicalDevice *pPhysicalDevice = it->second;
- if (pPhysicalDevice) {
- LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, instance, "VkInstance",
- SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
- "%s() called before all of its associated "
- "VkPhysicalDevices were destroyed.",
- __FUNCTION__);
- free(pPhysicalDevice->pSurfaceFormats);
- free(pPhysicalDevice->pPresentModes);
- }
-
- // Erase the SwpPhysicalDevice's from the my_data->physicalDeviceMap (which
- // are simply pointed to by the SwpInstance):
- my_data->physicalDeviceMap.erase(it->second->physicalDevice);
- }
- for (auto it = pInstance->surfaces.begin() ;
- it != pInstance->surfaces.end() ; it++) {
-
- // Free memory that was allocated for/by this SwpPhysicalDevice:
- SwpSurface *pSurface = it->second;
- if (pSurface) {
- LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, instance, "VkInstance",
- SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
- "%s() called before all of its associated "
- "VkSurfaceKHRs were destroyed.",
- __FUNCTION__);
- }
- }
- my_data->instanceMap.erase(instance);
- }
- delete my_data->instance_dispatch_table;
- layer_data_map.erase(key);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(
- VkPhysicalDevice physicalDevice,
- uint32_t* pQueueFamilyPropertyCount,
- VkQueueFamilyProperties* pQueueFamilyProperties)
-{
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- my_data->instance_dispatch_table->GetPhysicalDeviceQueueFamilyProperties(
- physicalDevice,
- pQueueFamilyPropertyCount,
- pQueueFamilyProperties);
-
- // Record the result of this query:
- if (pPhysicalDevice &&
- pQueueFamilyPropertyCount && !pQueueFamilyProperties) {
- pPhysicalDevice->gotQueueFamilyPropertyCount = true;
- pPhysicalDevice->numOfQueueFamilies =
- *pQueueFamilyPropertyCount;
- }
- }
-}
-
-#ifdef VK_USE_PLATFORM_ANDROID_KHR
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
- VkInstance instance,
- const VkAndroidSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- // Validate that the platform extension was enabled:
- if (pInstance && !pInstance->SurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_x_SURFACE_EXTENSION_NAME);
- }
-
- if (!pCreateInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- } else {
- if (pCreateInfo->sType != VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo",
- "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR");
- }
- if (pCreateInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- }
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->CreateAndroidSurfaceKHR(
- instance, pCreateInfo, pAllocator, pSurface);
-
- if ((result == VK_SUCCESS) && pInstance && pSurface) {
- // Record the VkSurfaceKHR returned by the ICD:
- my_data->surfaceMap[*pSurface].surface = *pSurface;
- my_data->surfaceMap[*pSurface].pInstance = pInstance;
- my_data->surfaceMap[*pSurface].usedAllocatorToCreate =
- (pAllocator != NULL);
- my_data->surfaceMap[*pSurface].numQueueFamilyIndexSupport = 0;
- my_data->surfaceMap[*pSurface].pQueueFamilyIndexSupport = NULL;
- // Point to the associated SwpInstance:
- pInstance->surfaces[*pSurface] = &my_data->surfaceMap[*pSurface];
- }
-
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
-
-#ifdef VK_USE_PLATFORM_MIR_KHR
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateMirSurfaceKHR(
- VkInstance instance,
- const VkMirSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- // Validate that the platform extension was enabled:
- if (pInstance && !pInstance->mirSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_MIR_SURFACE_EXTENSION_NAME);
- }
-
- if (!pCreateInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- } else {
- if (pCreateInfo->sType != VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo",
- "VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR");
- }
- if (pCreateInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- }
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->CreateMirSurfaceKHR(
- instance, pCreateInfo, pAllocator, pSurface);
-
- if ((result == VK_SUCCESS) && pInstance && pSurface) {
- // Record the VkSurfaceKHR returned by the ICD:
- my_data->surfaceMap[*pSurface].surface = *pSurface;
- my_data->surfaceMap[*pSurface].pInstance = pInstance;
- my_data->surfaceMap[*pSurface].usedAllocatorToCreate =
- (pAllocator != NULL);
- my_data->surfaceMap[*pSurface].numQueueFamilyIndexSupport = 0;
- my_data->surfaceMap[*pSurface].pQueueFamilyIndexSupport = NULL;
- // Point to the associated SwpInstance:
- pInstance->surfaces[*pSurface] = &my_data->surfaceMap[*pSurface];
- }
-
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceMirPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- MirConnection* connection)
-{
- VkBool32 result = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-
- // Validate that the platform extension was enabled:
- if (pPhysicalDevice && pPhysicalDevice->pInstance &&
- !pPhysicalDevice->pInstance->mirSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pPhysicalDevice->pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_MIR_SURFACE_EXTENSION_NAME);
- }
- if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
- (queueFamilyIndex >= pPhysicalDevice->numOfQueueFamilies)) {
- skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- pPhysicalDevice,
- "VkPhysicalDevice",
- queueFamilyIndex,
- pPhysicalDevice->numOfQueueFamilies);
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->GetPhysicalDeviceMirPresentationSupportKHR(
- physicalDevice, queueFamilyIndex, connection);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_MIR_KHR
-
-#ifdef VK_USE_PLATFORM_WAYLAND_KHR
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(
- VkInstance instance,
- const VkWaylandSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- // Validate that the platform extension was enabled:
- if (pInstance && !pInstance->waylandSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
- }
-
- if (!pCreateInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- } else {
- if (pCreateInfo->sType != VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo",
- "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR");
- }
- if (pCreateInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- }
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->CreateWaylandSurfaceKHR(
- instance, pCreateInfo, pAllocator, pSurface);
-
- if ((result == VK_SUCCESS) && pInstance && pSurface) {
- // Record the VkSurfaceKHR returned by the ICD:
- my_data->surfaceMap[*pSurface].surface = *pSurface;
- my_data->surfaceMap[*pSurface].pInstance = pInstance;
- my_data->surfaceMap[*pSurface].usedAllocatorToCreate =
- (pAllocator != NULL);
- my_data->surfaceMap[*pSurface].numQueueFamilyIndexSupport = 0;
- my_data->surfaceMap[*pSurface].pQueueFamilyIndexSupport = NULL;
- // Point to the associated SwpInstance:
- pInstance->surfaces[*pSurface] = &my_data->surfaceMap[*pSurface];
- }
-
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- struct wl_display* display)
-{
- VkBool32 result = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-
- // Validate that the platform extension was enabled:
- if (pPhysicalDevice && pPhysicalDevice->pInstance &&
- !pPhysicalDevice->pInstance->waylandSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pPhysicalDevice->pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
- }
- if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
- (queueFamilyIndex >= pPhysicalDevice->numOfQueueFamilies)) {
- skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- pPhysicalDevice,
- "VkPhysicalDevice",
- queueFamilyIndex,
- pPhysicalDevice->numOfQueueFamilies);
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->GetPhysicalDeviceWaylandPresentationSupportKHR(
- physicalDevice, queueFamilyIndex, display);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
-
-#ifdef VK_USE_PLATFORM_WIN32_KHR
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(
- VkInstance instance,
- const VkWin32SurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- // Validate that the platform extension was enabled:
- if (pInstance && !pInstance->win32SurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
- }
-
- if (!pCreateInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- } else {
- if (pCreateInfo->sType != VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo",
- "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR");
- }
- if (pCreateInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- }
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->CreateWin32SurfaceKHR(
- instance, pCreateInfo, pAllocator, pSurface);
-
- if ((result == VK_SUCCESS) && pInstance && pSurface) {
- // Record the VkSurfaceKHR returned by the ICD:
- my_data->surfaceMap[*pSurface].surface = *pSurface;
- my_data->surfaceMap[*pSurface].pInstance = pInstance;
- my_data->surfaceMap[*pSurface].usedAllocatorToCreate =
- (pAllocator != NULL);
- my_data->surfaceMap[*pSurface].numQueueFamilyIndexSupport = 0;
- my_data->surfaceMap[*pSurface].pQueueFamilyIndexSupport = NULL;
- // Point to the associated SwpInstance:
- pInstance->surfaces[*pSurface] = &my_data->surfaceMap[*pSurface];
- }
-
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex)
-{
- VkBool32 result = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-
- // Validate that the platform extension was enabled:
- if (pPhysicalDevice && pPhysicalDevice->pInstance &&
- !pPhysicalDevice->pInstance->win32SurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pPhysicalDevice->pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
- }
- if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
- (queueFamilyIndex >= pPhysicalDevice->numOfQueueFamilies)) {
- skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- pPhysicalDevice,
- "VkPhysicalDevice",
- queueFamilyIndex,
- pPhysicalDevice->numOfQueueFamilies);
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->GetPhysicalDeviceWin32PresentationSupportKHR(
- physicalDevice, queueFamilyIndex);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_WIN32_KHR
-
-#ifdef VK_USE_PLATFORM_XCB_KHR
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(
- VkInstance instance,
- const VkXcbSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- // Validate that the platform extension was enabled:
- if (pInstance && !pInstance->xcbSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_XCB_SURFACE_EXTENSION_NAME);
- }
-
- if (!pCreateInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- } else {
- if (pCreateInfo->sType != VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo",
- "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR");
- }
- if (pCreateInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- }
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->CreateXcbSurfaceKHR(
- instance, pCreateInfo, pAllocator, pSurface);
-
- if ((result == VK_SUCCESS) && pInstance && pSurface) {
- // Record the VkSurfaceKHR returned by the ICD:
- my_data->surfaceMap[*pSurface].surface = *pSurface;
- my_data->surfaceMap[*pSurface].pInstance = pInstance;
- my_data->surfaceMap[*pSurface].usedAllocatorToCreate =
- (pAllocator != NULL);
- my_data->surfaceMap[*pSurface].numQueueFamilyIndexSupport = 0;
- my_data->surfaceMap[*pSurface].pQueueFamilyIndexSupport = NULL;
- // Point to the associated SwpInstance:
- pInstance->surfaces[*pSurface] = &my_data->surfaceMap[*pSurface];
- }
-
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- xcb_connection_t* connection,
- xcb_visualid_t visual_id)
-{
- VkBool32 result = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-
- // Validate that the platform extension was enabled:
- if (pPhysicalDevice && pPhysicalDevice->pInstance &&
- !pPhysicalDevice->pInstance->xcbSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pPhysicalDevice->pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_XCB_SURFACE_EXTENSION_NAME);
- }
- if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
- (queueFamilyIndex >= pPhysicalDevice->numOfQueueFamilies)) {
- skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- pPhysicalDevice,
- "VkPhysicalDevice",
- queueFamilyIndex,
- pPhysicalDevice->numOfQueueFamilies);
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->GetPhysicalDeviceXcbPresentationSupportKHR(
- physicalDevice, queueFamilyIndex, connection, visual_id);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XCB_KHR
-
-#ifdef VK_USE_PLATFORM_XLIB_KHR
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(
- VkInstance instance,
- const VkXlibSurfaceCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSurfaceKHR* pSurface)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- // Validate that the platform extension was enabled:
- if (pInstance && !pInstance->xlibSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
- }
-
- if (!pCreateInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- } else {
- if (pCreateInfo->sType != VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo",
- "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR");
- }
- if (pCreateInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- }
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->CreateXlibSurfaceKHR(
- instance, pCreateInfo, pAllocator, pSurface);
-
- if ((result == VK_SUCCESS) && pInstance && pSurface) {
- // Record the VkSurfaceKHR returned by the ICD:
- my_data->surfaceMap[*pSurface].surface = *pSurface;
- my_data->surfaceMap[*pSurface].pInstance = pInstance;
- my_data->surfaceMap[*pSurface].usedAllocatorToCreate =
- (pAllocator != NULL);
- my_data->surfaceMap[*pSurface].numQueueFamilyIndexSupport = 0;
- my_data->surfaceMap[*pSurface].pQueueFamilyIndexSupport = NULL;
- // Point to the associated SwpInstance:
- pInstance->surfaces[*pSurface] = &my_data->surfaceMap[*pSurface];
- }
-
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- Display* dpy,
- VisualID visualID)
-{
- VkBool32 result = VK_FALSE;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-
- // Validate that the platform extension was enabled:
- if (pPhysicalDevice && pPhysicalDevice->pInstance &&
- !pPhysicalDevice->pInstance->xlibSurfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pPhysicalDevice->pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
- }
- if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
- (queueFamilyIndex >= pPhysicalDevice->numOfQueueFamilies)) {
- skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- pPhysicalDevice,
- "VkPhysicalDevice",
- queueFamilyIndex,
- pPhysicalDevice->numOfQueueFamilies);
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->GetPhysicalDeviceXlibPresentationSupportKHR(
- physicalDevice, queueFamilyIndex, dpy, visualID);
- }
- return result;
-}
-#endif // VK_USE_PLATFORM_XLIB_KHR
-
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator)
-{
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
- SwpSurface *pSurface = &my_data->surfaceMap[surface];
-
- // Regardless of skipCall value, do some internal cleanup:
- if (pSurface) {
- // Delete the SwpSurface associated with this surface:
- if (pSurface->pInstance) {
- pSurface->pInstance->surfaces.erase(surface);
- }
- if (!pSurface->swapchains.empty()) {
- LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, instance, "VkInstance",
- SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
- "%s() called before all of its associated "
- "VkSwapchainKHRs were destroyed.",
- __FUNCTION__);
- // Empty and then delete all SwpSwapchain's
- for (auto it = pSurface->swapchains.begin() ;
- it != pSurface->swapchains.end() ; it++) {
- // Delete all SwpImage's
- it->second->images.clear();
- // In case the swapchain's device hasn't been destroyed yet
- // (which isn't likely, but is possible), delete its
- // association with this swapchain (i.e. so we can't point to
- // this swpchain from that device, later on):
- if (it->second->pDevice) {
- it->second->pDevice->swapchains.clear();
- }
- }
- pSurface->swapchains.clear();
- }
- if ((pAllocator != NULL) != pSurface->usedAllocatorToCreate) {
- LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, instance, "VkInstance",
- SWAPCHAIN_INCOMPATIBLE_ALLOCATOR,
- "%s() called with incompatible pAllocator from when "
- "the object was created.",
- __FUNCTION__);
- }
- my_data->surfaceMap.erase(surface);
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- my_data->instance_dispatch_table->DestroySurfaceKHR(
- instance, surface, pAllocator);
- }
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- SwpInstance *pInstance = &(my_data->instanceMap[instance]);
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->EnumeratePhysicalDevices(
- instance, pPhysicalDeviceCount, pPhysicalDevices);
-
- if ((result == VK_SUCCESS) && pInstance && pPhysicalDevices &&
- (*pPhysicalDeviceCount > 0)) {
- // Record the VkPhysicalDevices returned by the ICD:
- for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
- my_data->physicalDeviceMap[pPhysicalDevices[i]].physicalDevice =
- pPhysicalDevices[i];
- my_data->physicalDeviceMap[pPhysicalDevices[i]].pInstance = pInstance;
- my_data->physicalDeviceMap[pPhysicalDevices[i]].pDevice = NULL;
- my_data->physicalDeviceMap[pPhysicalDevices[i]].gotQueueFamilyPropertyCount = false;
- my_data->physicalDeviceMap[pPhysicalDevices[i]].gotSurfaceCapabilities = false;
- my_data->physicalDeviceMap[pPhysicalDevices[i]].surfaceFormatCount = 0;
- my_data->physicalDeviceMap[pPhysicalDevices[i]].pSurfaceFormats = NULL;
- my_data->physicalDeviceMap[pPhysicalDevices[i]].presentModeCount = 0;
- my_data->physicalDeviceMap[pPhysicalDevices[i]].pPresentModes = NULL;
- // Point to the associated SwpInstance:
- if (pInstance) {
- pInstance->physicalDevices[pPhysicalDevices[i]] =
- &my_data->physicalDeviceMap[pPhysicalDevices[i]];
- }
- }
- }
-
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice)
-{
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
-
- assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
- if (fpCreateDevice == NULL) {
- return VK_ERROR_INITIALIZATION_FAILED;
- }
-
- // Advance the link info for the next element on the chain
- chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
-
- VkResult result = fpCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
- if (result != VK_SUCCESS) {
- return result;
- }
-
- layer_data *my_instance_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(*pDevice), layer_data_map);
-
- // Setup device dispatch table
- my_device_data->device_dispatch_table = new VkLayerDispatchTable;
- layer_init_device_dispatch_table(*pDevice, my_device_data->device_dispatch_table, fpGetDeviceProcAddr);
-
- my_device_data->report_data = layer_debug_report_create_device(my_instance_data->report_data, *pDevice);
- createDeviceRegisterExtensions(physicalDevice, pCreateInfo, *pDevice);
-
- return result;
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator)
-{
- VkBool32 skipCall = VK_FALSE;
- dispatch_key key = get_dispatch_key(device);
- layer_data *my_data = get_my_data_ptr(key, layer_data_map);
- SwpDevice *pDevice = &my_data->deviceMap[device];
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- my_data->device_dispatch_table->DestroyDevice(device, pAllocator);
- }
-
- // Regardless of skipCall value, do some internal cleanup:
- if (pDevice) {
- // Delete the SwpDevice associated with this device:
- if (pDevice->pPhysicalDevice) {
- pDevice->pPhysicalDevice->pDevice = NULL;
- }
- if (!pDevice->swapchains.empty()) {
- LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
- "%s() called before all of its associated "
- "VkSwapchainKHRs were destroyed.",
- __FUNCTION__);
- // Empty and then delete all SwpSwapchain's
- for (auto it = pDevice->swapchains.begin() ;
- it != pDevice->swapchains.end() ; it++) {
- // Delete all SwpImage's
- it->second->images.clear();
- // In case the swapchain's surface hasn't been destroyed yet
- // (which is likely) delete its association with this swapchain
- // (i.e. so we can't point to this swpchain from that surface,
- // later on):
- if (it->second->pSurface) {
- it->second->pSurface->swapchains.clear();
- }
- }
- pDevice->swapchains.clear();
- }
- my_data->deviceMap.erase(device);
- }
- delete my_data->device_dispatch_table;
- layer_data_map.erase(key);
-}
-
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
- VkPhysicalDevice physicalDevice,
- uint32_t queueFamilyIndex,
- VkSurfaceKHR surface,
- VkBool32* pSupported)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-
- // Validate that the surface extension was enabled:
- if (pPhysicalDevice && pPhysicalDevice->pInstance &&
- !pPhysicalDevice->pInstance->surfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pPhysicalDevice->pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
- }
- if (!pPhysicalDevice->gotQueueFamilyPropertyCount) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- pPhysicalDevice,
- "VkPhysicalDevice",
- SWAPCHAIN_DID_NOT_QUERY_QUEUE_FAMILIES,
- "%s() called before calling the "
- "vkGetPhysicalDeviceQueueFamilyProperties "
- "function.",
- __FUNCTION__);
- } else if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
- (queueFamilyIndex >= pPhysicalDevice->numOfQueueFamilies)) {
- skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- pPhysicalDevice,
- "VkPhysicalDevice",
- queueFamilyIndex,
- pPhysicalDevice->numOfQueueFamilies);
- }
- if (!pSupported) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- physicalDevice,
- "pSupported");
- }
-
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->GetPhysicalDeviceSurfaceSupportKHR(
- physicalDevice, queueFamilyIndex, surface,
- pSupported);
-
- if ((result == VK_SUCCESS) && pSupported && pPhysicalDevice) {
- // Record the result of this query:
- SwpInstance *pInstance = pPhysicalDevice->pInstance;
- SwpSurface *pSurface =
- (pInstance) ? pInstance->surfaces[surface] : NULL;
- if (pSurface) {
- pPhysicalDevice->supportedSurfaces[surface] = pSurface;
- if (!pSurface->numQueueFamilyIndexSupport) {
- if (pPhysicalDevice->gotQueueFamilyPropertyCount) {
- pSurface->pQueueFamilyIndexSupport = (VkBool32 *)
- malloc(pPhysicalDevice->numOfQueueFamilies *
- sizeof(VkBool32));
- if (pSurface->pQueueFamilyIndexSupport != NULL) {
- pSurface->numQueueFamilyIndexSupport =
- pPhysicalDevice->numOfQueueFamilies;
-                    }
-                if (pSurface->numQueueFamilyIndexSupport) {
-                    pSurface->pQueueFamilyIndexSupport[queueFamilyIndex] =
-                        *pSupported;
-                }
-            }
-        }
-        return result;
-    }
-    return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
-    VkPhysicalDevice physicalDevice,
-    VkSurfaceKHR surface,
-    VkSurfaceCapabilitiesKHR* pSurfaceCapabilities)
-{
-    VkResult result = VK_SUCCESS;
-    VkBool32 skipCall = VK_FALSE;
-    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
-    SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-    // Validate that the surface extension was enabled:
-    if (pPhysicalDevice && pPhysicalDevice->pInstance &&
-        !pPhysicalDevice->pInstance->surfaceExtensionEnabled) {
-        skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
-                              pPhysicalDevice->pInstance,
-                              "VkInstance",
-                              SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
-                              "%s() called even though the %s extension was not enabled for this VkInstance.",
-                              __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
-    }
-    if (!pSurfaceCapabilities) {
-        skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
-                                           physicalDevice,
-                                           "pSurfaceCapabilities");
-    }
-    if (VK_FALSE == skipCall) {
-        // Call down the call chain:
-        result = my_data->instance_dispatch_table->GetPhysicalDeviceSurfaceCapabilitiesKHR(
-            physicalDevice, surface, pSurfaceCapabilities);
-        if ((result == VK_SUCCESS) && pPhysicalDevice) {
-            // Record the result of this query:
-            pPhysicalDevice->gotSurfaceCapabilities = true;
-// FIXME: NEED TO COPY THIS DATA, BECAUSE pSurfaceCapabilities POINTS TO APP-ALLOCATED DATA
-            pPhysicalDevice->surfaceCapabilities = *pSurfaceCapabilities;
-        }
-        return result;
-    }
-    return VK_ERROR_VALIDATION_FAILED_EXT;
-}
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
-    VkPhysicalDevice physicalDevice,
-    VkSurfaceKHR surface,
-    uint32_t* pSurfaceFormatCount,
-    VkSurfaceFormatKHR* pSurfaceFormats)
-{
-    VkResult result = VK_SUCCESS;
-    VkBool32 skipCall = VK_FALSE;
-    layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
-    SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
-    // Validate that the surface extension was enabled:
-    if (pPhysicalDevice && pPhysicalDevice->pInstance &&
-        !pPhysicalDevice->pInstance->surfaceExtensionEnabled) {
-        skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
-                              pPhysicalDevice->pInstance,
-                              "VkInstance",
-                              SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
-                              "%s() called even though the %s extension was not enabled for this VkInstance.",
-                              __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
-    }
-    if (!pSurfaceFormatCount) {
-        skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
-                                           physicalDevice,
-                                           "pSurfaceFormatCount");
-    }
-    if (VK_FALSE == skipCall) {
-        // Call down the call chain:
-        result = my_data->instance_dispatch_table->GetPhysicalDeviceSurfaceFormatsKHR(
-            physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats);
-        if ((result == VK_SUCCESS) && pPhysicalDevice && !pSurfaceFormats &&
-            pSurfaceFormatCount) {
-            // Record the result of this preliminary query:
-            pPhysicalDevice->surfaceFormatCount = *pSurfaceFormatCount;
-        }
-        else if ((result == VK_SUCCESS) && pPhysicalDevice && pSurfaceFormats &&
-            pSurfaceFormatCount) {
-            // Compare the preliminary value of *pSurfaceFormatCount with the
-            // value this time:
-            if (*pSurfaceFormatCount > pPhysicalDevice->surfaceFormatCount) {
-                LOG_ERROR_INVALID_COUNT(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
-                                        physicalDevice,
-                                        "pSurfaceFormatCount",
-                                        "pSurfaceFormats",
-                                        *pSurfaceFormatCount,
+        }
    }
    }
+#include "vk_dispatch_table_helper.h"
+    static void initSwapchain(
+        layer_data * my_data,
+        const VkAllocationCallbacks *pAllocator) {
+        uint32_t report_flags = 0;
+        uint32_t debug_action = 0;
+        FILE *log_output = NULL;
+        const char *option_str;
+        VkDebugReportCallbackEXT callback;
+        // Initialize Swapchain options:
+        report_flags =
+            getLayerOptionFlags("SwapchainReportFlags", 0);
+        getLayerOptionEnum("SwapchainDebugAction",
+                           (uint32_t *)&debug_action);
+        if (debug_action & VK_DBG_LAYER_ACTION_LOG_MSG) {
+            // Turn on logging, since it was requested:
+            option_str = getLayerOption("SwapchainLogFilename");
+            log_output =
+                getLayerLogOutput(option_str, "Swapchain");
+            VkDebugReportCallbackCreateInfoEXT dbgInfo;
+            memset(&dbgInfo, 0, sizeof(dbgInfo));
+            dbgInfo.sType =
+                VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
+            dbgInfo.pfnCallback = log_callback;
+            dbgInfo.pUserData = log_output;
+            dbgInfo.flags = report_flags;
+            layer_create_msg_callback(my_data->report_data,
+                                      &dbgInfo, pAllocator,
+                                      &callback);
+            my_data->logging_callback.push_back(callback);
+        }
+        if (debug_action & VK_DBG_LAYER_ACTION_DEBUG_OUTPUT) {
+            VkDebugReportCallbackCreateInfoEXT dbgInfo;
+            memset(&dbgInfo, 0, sizeof(dbgInfo));
+            dbgInfo.sType =
+                VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
+            dbgInfo.pfnCallback = win32_debug_output_msg;
+            dbgInfo.pUserData = log_output;
+            dbgInfo.flags = report_flags;
+            layer_create_msg_callback(my_data->report_data,
+                                      &dbgInfo, pAllocator,
+                                      &callback);
+            my_data->logging_callback.push_back(callback);
+        }
+    }
+    static const char *surfaceTransformStr(
+        VkSurfaceTransformFlagBitsKHR value) {
+        // Return a string corresponding to the value:
+        return string_VkSurfaceTransformFlagBitsKHR(value);
+    }
+    static const char *surfaceCompositeAlphaStr(
+        VkCompositeAlphaFlagBitsKHR value) {
+        // Return a string corresponding to the value:
+        return string_VkCompositeAlphaFlagBitsKHR(value);
+    }
+    static const char *presentModeStr(VkPresentModeKHR value) {
+        // Return a string corresponding to the value:
+        return string_VkPresentModeKHR(value);
+    }
+    static const char *sharingModeStr(VkSharingMode value) {
+        // Return a string corresponding to the value:
+        return string_VkSharingMode(value);
+    }
+    VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+    vkCreateInstance(
+        const VkInstanceCreateInfo *pCreateInfo,
+        const VkAllocationCallbacks *pAllocator,
+        VkInstance *pInstance) {
+        VkLayerInstanceCreateInfo *chain_info =
+            get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+        assert(chain_info->u.pLayerInfo);
+        PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+            chain_info->u.pLayerInfo
+                ->pfnNextGetInstanceProcAddr;
+        PFN_vkCreateInstance fpCreateInstance =
+            (PFN_vkCreateInstance)fpGetInstanceProcAddr(
+                NULL, "vkCreateInstance");
+        if (fpCreateInstance == NULL) {
+            return VK_ERROR_INITIALIZATION_FAILED;
+        }
+ // Advance the link info for the next element on the
+ // chain
+ chain_info->u.pLayerInfo =
+ chain_info->u.pLayerInfo->pNext;
+
+ VkResult result = fpCreateInstance(
+ pCreateInfo, pAllocator, pInstance);
+ if (result != VK_SUCCESS) {
+ return result;
+ }
+
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(*pInstance), layer_data_map);
+ my_data->instance_dispatch_table =
+ new VkLayerInstanceDispatchTable;
+ layer_init_instance_dispatch_table(
+ *pInstance, my_data->instance_dispatch_table,
+ fpGetInstanceProcAddr);
+
+ my_data->report_data = debug_report_create_instance(
+ my_data->instance_dispatch_table, *pInstance,
+ pCreateInfo->enabledExtensionCount,
+ pCreateInfo->ppEnabledExtensionNames);
+
+ // Call the following function after my_data is
+ // initialized:
+ createInstanceRegisterExtensions(pCreateInfo,
+ *pInstance);
+ initSwapchain(my_data, pAllocator);
+
+ return result;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyInstance(
+ VkInstance instance,
+ const VkAllocationCallbacks *pAllocator) {
+ VkBool32 skipCall = VK_FALSE;
+ dispatch_key key = get_dispatch_key(instance);
+ layer_data *my_data =
+ get_my_data_ptr(key, layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ my_data->instance_dispatch_table->DestroyInstance(
+ instance, pAllocator);
+
+ // Clean up logging callback, if any
+ while (my_data->logging_callback.size() > 0) {
+ VkDebugReportCallbackEXT callback =
+ my_data->logging_callback.back();
+ layer_destroy_msg_callback(
+ my_data->report_data, callback, pAllocator);
+ my_data->logging_callback.pop_back();
+ }
+ layer_debug_report_destroy_instance(
+ my_data->report_data);
+ }
+
+ // Regardless of skipCall value, do some internal
+ // cleanup:
+ if (pInstance) {
+ // Delete all of the SwpPhysicalDevice's,
+ // SwpSurface's, and the
+ // SwpInstance associated with this instance:
+ for (auto it = pInstance->physicalDevices.begin();
+ it != pInstance->physicalDevices.end(); it++) {
+
+ // Free memory that was allocated for/by this
+ // SwpPhysicalDevice:
+ SwpPhysicalDevice *pPhysicalDevice = it->second;
+ if (pPhysicalDevice) {
+ LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ instance, "VkInstance",
+ SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
+ "%s() called before all of its "
+ "associated "
+ "VkPhysicalDevices were destroyed.",
+ __FUNCTION__);
+ free(pPhysicalDevice->pSurfaceFormats);
+ free(pPhysicalDevice->pPresentModes);
+ }
+
+ // Erase the SwpPhysicalDevice's from the
+ // my_data->physicalDeviceMap (which
+ // are simply pointed to by the SwpInstance):
+ my_data->physicalDeviceMap.erase(
+ it->second->physicalDevice);
+ }
+ for (auto it = pInstance->surfaces.begin();
+ it != pInstance->surfaces.end(); it++) {
+
+ // Free memory that was allocated for/by this
+ // SwpPhysicalDevice:
+ SwpSurface *pSurface = it->second;
+ if (pSurface) {
+ LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ instance, "VkInstance",
+ SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
+ "%s() called before all of its "
+ "associated "
+ "VkSurfaceKHRs were destroyed.",
+ __FUNCTION__);
+ }
+ }
+ my_data->instanceMap.erase(instance);
+ }
+ delete my_data->instance_dispatch_table;
+ layer_data_map.erase(key);
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkGetPhysicalDeviceQueueFamilyProperties(
+ VkPhysicalDevice physicalDevice,
+ uint32_t * pQueueFamilyPropertyCount,
+ VkQueueFamilyProperties * pQueueFamilyProperties) {
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceQueueFamilyProperties(
+ physicalDevice, pQueueFamilyPropertyCount,
+ pQueueFamilyProperties);
+
+ // Record the result of this query:
+ if (pPhysicalDevice && pQueueFamilyPropertyCount &&
+ !pQueueFamilyProperties) {
+ pPhysicalDevice->gotQueueFamilyPropertyCount =
+ true;
+ pPhysicalDevice->numOfQueueFamilies =
+ *pQueueFamilyPropertyCount;
+ }
+ }
+ }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateAndroidSurfaceKHR(
+ VkInstance instance,
+ const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSurfaceKHR *pSurface) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ // Validate that the platform extension was enabled:
+ if (pInstance && !pInstance->SurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__, VK_KHR_x_SURFACE_EXTENSION_NAME);
+ }
+
+ if (!pCreateInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pCreateInfo");
+ } else {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo", "VK_STRUCTURE_TYPE_"
+ "ANDROID_SURFACE_"
+ "CREATE_INFO_KHR");
+ }
+ if (pCreateInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo");
+ }
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->instance_dispatch_table
+ ->CreateAndroidSurfaceKHR(
+ instance, pCreateInfo, pAllocator,
+ pSurface);
+
+ if ((result == VK_SUCCESS) && pInstance &&
+ pSurface) {
+ // Record the VkSurfaceKHR returned by the ICD:
+ my_data->surfaceMap[*pSurface].surface =
+ *pSurface;
+ my_data->surfaceMap[*pSurface].pInstance =
+ pInstance;
+ my_data->surfaceMap[*pSurface]
+ .usedAllocatorToCreate =
+ (pAllocator != NULL);
+ my_data->surfaceMap[*pSurface]
+ .numQueueFamilyIndexSupport = 0;
+ my_data->surfaceMap[*pSurface]
+ .pQueueFamilyIndexSupport = NULL;
+ // Point to the associated SwpInstance:
+ pInstance->surfaces[*pSurface] =
+ &my_data->surfaceMap[*pSurface];
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+#endif // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateMirSurfaceKHR(
+ VkInstance instance,
+ const VkMirSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSurfaceKHR *pSurface) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ // Validate that the platform extension was enabled:
+ if (pInstance &&
+ !pInstance->mirSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_MIR_SURFACE_EXTENSION_NAME);
+ }
+
+ if (!pCreateInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pCreateInfo");
+ } else {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo", "VK_STRUCTURE_TYPE_"
+ "MIR_SURFACE_CREATE_"
+ "INFO_KHR");
+ }
+ if (pCreateInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo");
+ }
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->CreateMirSurfaceKHR(instance, pCreateInfo,
+ pAllocator, pSurface);
+
+ if ((result == VK_SUCCESS) && pInstance &&
+ pSurface) {
+ // Record the VkSurfaceKHR returned by the ICD:
+ my_data->surfaceMap[*pSurface].surface =
+ *pSurface;
+ my_data->surfaceMap[*pSurface].pInstance =
+ pInstance;
+ my_data->surfaceMap[*pSurface]
+ .usedAllocatorToCreate =
+ (pAllocator != NULL);
+ my_data->surfaceMap[*pSurface]
+ .numQueueFamilyIndexSupport = 0;
+ my_data->surfaceMap[*pSurface]
+ .pQueueFamilyIndexSupport = NULL;
+ // Point to the associated SwpInstance:
+ pInstance->surfaces[*pSurface] =
+ &my_data->surfaceMap[*pSurface];
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL
+ vkGetPhysicalDeviceMirPresentationSupportKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ MirConnection * connection) {
+ VkBool32 result = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the platform extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->mirSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_MIR_SURFACE_EXTENSION_NAME);
+ }
+ if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
+ (queueFamilyIndex >=
+ pPhysicalDevice->numOfQueueFamilies)) {
+ skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ pPhysicalDevice, "VkPhysicalDevice",
+ queueFamilyIndex,
+ pPhysicalDevice->numOfQueueFamilies);
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceMirPresentationSupportKHR(
+ physicalDevice, queueFamilyIndex,
+ connection);
+ }
+ return result;
+ }
+#endif // VK_USE_PLATFORM_MIR_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateWaylandSurfaceKHR(
+ VkInstance instance,
+ const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSurfaceKHR *pSurface) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ // Validate that the platform extension was enabled:
+ if (pInstance &&
+ !pInstance->waylandSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
+ }
+
+ if (!pCreateInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pCreateInfo");
+ } else {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo", "VK_STRUCTURE_TYPE_"
+ "WAYLAND_SURFACE_"
+ "CREATE_INFO_KHR");
+ }
+ if (pCreateInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo");
+ }
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->instance_dispatch_table
+ ->CreateWaylandSurfaceKHR(
+ instance, pCreateInfo, pAllocator,
+ pSurface);
+
+ if ((result == VK_SUCCESS) && pInstance &&
+ pSurface) {
+ // Record the VkSurfaceKHR returned by the ICD:
+ my_data->surfaceMap[*pSurface].surface =
+ *pSurface;
+ my_data->surfaceMap[*pSurface].pInstance =
+ pInstance;
+ my_data->surfaceMap[*pSurface]
+ .usedAllocatorToCreate =
+ (pAllocator != NULL);
+ my_data->surfaceMap[*pSurface]
+ .numQueueFamilyIndexSupport = 0;
+ my_data->surfaceMap[*pSurface]
+ .pQueueFamilyIndexSupport = NULL;
+ // Point to the associated SwpInstance:
+ pInstance->surfaces[*pSurface] =
+ &my_data->surfaceMap[*pSurface];
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL
+ vkGetPhysicalDeviceWaylandPresentationSupportKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ struct wl_display * display) {
+ VkBool32 result = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the platform extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->waylandSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
+ }
+ if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
+ (queueFamilyIndex >=
+ pPhysicalDevice->numOfQueueFamilies)) {
+ skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ pPhysicalDevice, "VkPhysicalDevice",
+ queueFamilyIndex,
+ pPhysicalDevice->numOfQueueFamilies);
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceWaylandPresentationSupportKHR(
+ physicalDevice, queueFamilyIndex,
+ display);
+ }
+ return result;
+ }
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateWin32SurfaceKHR(
+ VkInstance instance,
+ const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSurfaceKHR *pSurface) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ // Validate that the platform extension was enabled:
+ if (pInstance &&
+ !pInstance->win32SurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+ }
+
+ if (!pCreateInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pCreateInfo");
+ } else {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo", "VK_STRUCTURE_TYPE_"
+ "WIN32_SURFACE_"
+ "CREATE_INFO_KHR");
+ }
+ if (pCreateInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo");
+ }
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->instance_dispatch_table
+ ->CreateWin32SurfaceKHR(
+ instance, pCreateInfo, pAllocator,
+ pSurface);
+
+ if ((result == VK_SUCCESS) && pInstance &&
+ pSurface) {
+ // Record the VkSurfaceKHR returned by the ICD:
+ my_data->surfaceMap[*pSurface].surface =
+ *pSurface;
+ my_data->surfaceMap[*pSurface].pInstance =
+ pInstance;
+ my_data->surfaceMap[*pSurface]
+ .usedAllocatorToCreate =
+ (pAllocator != NULL);
+ my_data->surfaceMap[*pSurface]
+ .numQueueFamilyIndexSupport = 0;
+ my_data->surfaceMap[*pSurface]
+ .pQueueFamilyIndexSupport = NULL;
+ // Point to the associated SwpInstance:
+ pInstance->surfaces[*pSurface] =
+ &my_data->surfaceMap[*pSurface];
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL
+ vkGetPhysicalDeviceWin32PresentationSupportKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex) {
+ VkBool32 result = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the platform extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->win32SurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
+ }
+ if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
+ (queueFamilyIndex >=
+ pPhysicalDevice->numOfQueueFamilies)) {
+ skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ pPhysicalDevice, "VkPhysicalDevice",
+ queueFamilyIndex,
+ pPhysicalDevice->numOfQueueFamilies);
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceWin32PresentationSupportKHR(
+ physicalDevice, queueFamilyIndex);
+ }
+ return result;
+ }
+#endif // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateXcbSurfaceKHR(
+ VkInstance instance,
+ const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSurfaceKHR *pSurface) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ // Validate that the platform extension was enabled:
+ if (pInstance &&
+ !pInstance->xcbSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+ }
+
+ if (!pCreateInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pCreateInfo");
+ } else {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo", "VK_STRUCTURE_TYPE_"
+ "XCB_SURFACE_CREATE_"
+ "INFO_KHR");
+ }
+ if (pCreateInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo");
+ }
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->CreateXcbSurfaceKHR(instance, pCreateInfo,
+ pAllocator, pSurface);
+
+ if ((result == VK_SUCCESS) && pInstance &&
+ pSurface) {
+ // Record the VkSurfaceKHR returned by the ICD:
+ my_data->surfaceMap[*pSurface].surface =
+ *pSurface;
+ my_data->surfaceMap[*pSurface].pInstance =
+ pInstance;
+ my_data->surfaceMap[*pSurface]
+ .usedAllocatorToCreate =
+ (pAllocator != NULL);
+ my_data->surfaceMap[*pSurface]
+ .numQueueFamilyIndexSupport = 0;
+ my_data->surfaceMap[*pSurface]
+ .pQueueFamilyIndexSupport = NULL;
+ // Point to the associated SwpInstance:
+ pInstance->surfaces[*pSurface] =
+ &my_data->surfaceMap[*pSurface];
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL
+ vkGetPhysicalDeviceXcbPresentationSupportKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex,
+ xcb_connection_t * connection,
+ xcb_visualid_t visual_id) {
+ VkBool32 result = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the platform extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->xcbSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+ }
+ if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
+ (queueFamilyIndex >=
+ pPhysicalDevice->numOfQueueFamilies)) {
+ skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ pPhysicalDevice, "VkPhysicalDevice",
+ queueFamilyIndex,
+ pPhysicalDevice->numOfQueueFamilies);
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceXcbPresentationSupportKHR(
+ physicalDevice, queueFamilyIndex,
+ connection, visual_id);
+ }
+ return result;
+ }
+#endif // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateXlibSurfaceKHR(
+ VkInstance instance,
+ const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSurfaceKHR *pSurface) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ // Validate that the platform extension was enabled:
+ if (pInstance &&
+ !pInstance->xlibSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
+ }
+
+ if (!pCreateInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pCreateInfo");
+ } else {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo", "VK_STRUCTURE_TYPE_"
+ "XLIB_SURFACE_"
+ "CREATE_INFO_KHR");
+ }
+ if (pCreateInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo");
+ }
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->instance_dispatch_table
+ ->CreateXlibSurfaceKHR(
+ instance, pCreateInfo, pAllocator,
+ pSurface);
+
+ if ((result == VK_SUCCESS) && pInstance &&
+ pSurface) {
+ // Record the VkSurfaceKHR returned by the ICD:
+ my_data->surfaceMap[*pSurface].surface =
+ *pSurface;
+ my_data->surfaceMap[*pSurface].pInstance =
+ pInstance;
+ my_data->surfaceMap[*pSurface]
+ .usedAllocatorToCreate =
+ (pAllocator != NULL);
+ my_data->surfaceMap[*pSurface]
+ .numQueueFamilyIndexSupport = 0;
+ my_data->surfaceMap[*pSurface]
+ .pQueueFamilyIndexSupport = NULL;
+ // Point to the associated SwpInstance:
+ pInstance->surfaces[*pSurface] =
+ &my_data->surfaceMap[*pSurface];
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkBool32 VKAPI_CALL
+ vkGetPhysicalDeviceXlibPresentationSupportKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex, Display * dpy,
+ VisualID visualID) {
+ VkBool32 result = VK_FALSE;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the platform extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->xlibSurfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__,
+ VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
+ }
+ if (pPhysicalDevice->gotQueueFamilyPropertyCount &&
+ (queueFamilyIndex >=
+ pPhysicalDevice->numOfQueueFamilies)) {
+ skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ pPhysicalDevice, "VkPhysicalDevice",
+ queueFamilyIndex,
+ pPhysicalDevice->numOfQueueFamilies);
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceXlibPresentationSupportKHR(
+ physicalDevice, queueFamilyIndex, dpy,
+ visualID);
+ }
+ return result;
+ }
+#endif // VK_USE_PLATFORM_XLIB_KHR
+
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroySurfaceKHR(
+ VkInstance instance, VkSurfaceKHR surface,
+ const VkAllocationCallbacks *pAllocator) {
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+ SwpSurface *pSurface = &my_data->surfaceMap[surface];
+
+ // Regardless of skipCall value, do some internal
+ // cleanup:
+ if (pSurface) {
+ // Delete the SwpSurface associated with this
+ // surface:
+ if (pSurface->pInstance) {
+ pSurface->pInstance->surfaces.erase(surface);
+ }
+ if (!pSurface->swapchains.empty()) {
+ LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ instance, "VkInstance",
+ SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
+ "%s() called before all of its associated "
+ "VkSwapchainKHRs were destroyed.",
+ __FUNCTION__);
+ // Empty and then delete all SwpSwapchain's
+ for (auto it = pSurface->swapchains.begin();
+ it != pSurface->swapchains.end(); it++) {
+ // Delete all SwpImage's
+ it->second->images.clear();
+ // In case the swapchain's device hasn't
+ // been destroyed yet
+ // (which isn't likely, but is possible),
+ // delete its
+ // association with this swapchain (i.e. so
+ // we can't point to
+                     // this swapchain from that device, later
+ // on):
+ if (it->second->pDevice) {
+ it->second->pDevice->swapchains.clear();
+ }
+ }
+ pSurface->swapchains.clear();
+ }
+ if ((pAllocator != NULL) !=
+ pSurface->usedAllocatorToCreate) {
+ LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ instance, "VkInstance",
+ SWAPCHAIN_INCOMPATIBLE_ALLOCATOR,
+ "%s() called with incompatible pAllocator "
+ "from when "
+ "the object was created.",
+ __FUNCTION__);
+ }
+ my_data->surfaceMap.erase(surface);
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ my_data->instance_dispatch_table->DestroySurfaceKHR(
+ instance, surface, pAllocator);
+ }
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkEnumeratePhysicalDevices(
+ VkInstance instance,
+ uint32_t * pPhysicalDeviceCount,
+ VkPhysicalDevice * pPhysicalDevices) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ SwpInstance *pInstance =
+ &(my_data->instanceMap[instance]);
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->instance_dispatch_table
+ ->EnumeratePhysicalDevices(
+ instance, pPhysicalDeviceCount,
+ pPhysicalDevices);
+
+ if ((result == VK_SUCCESS) && pInstance &&
+ pPhysicalDevices &&
+ (*pPhysicalDeviceCount > 0)) {
+ // Record the VkPhysicalDevices returned by the
+ // ICD:
+ for (uint32_t i = 0; i < *pPhysicalDeviceCount;
+ i++) {
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .physicalDevice = pPhysicalDevices[i];
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .pInstance = pInstance;
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .pDevice = NULL;
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .gotQueueFamilyPropertyCount = false;
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .gotSurfaceCapabilities = false;
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .surfaceFormatCount = 0;
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .pSurfaceFormats = NULL;
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .presentModeCount = 0;
+ my_data
+ ->physicalDeviceMap[pPhysicalDevices[i]]
+ .pPresentModes = NULL;
+ // Point to the associated SwpInstance:
+ if (pInstance) {
+ pInstance->physicalDevices
+ [pPhysicalDevices[i]] =
+ &my_data->physicalDeviceMap
+ [pPhysicalDevices[i]];
+ }
+ }
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDevice(VkPhysicalDevice physicalDevice,
+ const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDevice *pDevice) {
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+
+ assert(chain_info->u.pLayerInfo);
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo
+ ->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(
+ NULL, "vkCreateDevice");
+ if (fpCreateDevice == NULL) {
+ return VK_ERROR_INITIALIZATION_FAILED;
+ }
+
+ // Advance the link info for the next element on the
+ // chain
+ chain_info->u.pLayerInfo =
+ chain_info->u.pLayerInfo->pNext;
+
+ VkResult result = fpCreateDevice(
+ physicalDevice, pCreateInfo, pAllocator, pDevice);
+ if (result != VK_SUCCESS) {
+ return result;
+ }
+
+ layer_data *my_instance_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ layer_data *my_device_data = get_my_data_ptr(
+ get_dispatch_key(*pDevice), layer_data_map);
+
+ // Setup device dispatch table
+ my_device_data->device_dispatch_table =
+ new VkLayerDispatchTable;
+ layer_init_device_dispatch_table(
+ *pDevice, my_device_data->device_dispatch_table,
+ fpGetDeviceProcAddr);
+
+ my_device_data->report_data =
+ layer_debug_report_create_device(
+ my_instance_data->report_data, *pDevice);
+ createDeviceRegisterExtensions(physicalDevice,
+ pCreateInfo, *pDevice);
+
+ return result;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
+ VkDevice device,
+ const VkAllocationCallbacks *pAllocator) {
+ VkBool32 skipCall = VK_FALSE;
+ dispatch_key key = get_dispatch_key(device);
+ layer_data *my_data =
+ get_my_data_ptr(key, layer_data_map);
+ SwpDevice *pDevice = &my_data->deviceMap[device];
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ my_data->device_dispatch_table->DestroyDevice(
+ device, pAllocator);
+ }
+
+ // Regardless of skipCall value, do some internal
+ // cleanup:
+ if (pDevice) {
+ // Delete the SwpDevice associated with this device:
+ if (pDevice->pPhysicalDevice) {
+ pDevice->pPhysicalDevice->pDevice = NULL;
+ }
+ if (!pDevice->swapchains.empty()) {
+ LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN,
+ "%s() called before all of its associated "
+ "VkSwapchainKHRs were destroyed.",
+ __FUNCTION__);
+ // Empty and then delete all SwpSwapchain's
+ for (auto it = pDevice->swapchains.begin();
+ it != pDevice->swapchains.end(); it++) {
+ // Delete all SwpImage's
+ it->second->images.clear();
+ // In case the swapchain's surface hasn't
+ // been destroyed yet
+ // (which is likely) delete its association
+ // with this swapchain
+                     // (i.e. so we can't point to this swapchain
+ // from that surface,
+ // later on):
+ if (it->second->pSurface) {
+ it->second->pSurface->swapchains
+ .clear();
+ }
+ }
+ pDevice->swapchains.clear();
+ }
+ my_data->deviceMap.erase(device);
+ }
+ delete my_data->device_dispatch_table;
+ layer_data_map.erase(key);
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPhysicalDeviceSurfaceSupportKHR(
+ VkPhysicalDevice physicalDevice,
+ uint32_t queueFamilyIndex, VkSurfaceKHR surface,
+ VkBool32 * pSupported) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the surface extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->surfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
+ }
+ if (!pPhysicalDevice->gotQueueFamilyPropertyCount) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ pPhysicalDevice, "VkPhysicalDevice",
+ SWAPCHAIN_DID_NOT_QUERY_QUEUE_FAMILIES,
+ "%s() called before calling the "
+ "vkGetPhysicalDeviceQueueFamilyProperties "
+ "function.",
+ __FUNCTION__);
+ } else if (pPhysicalDevice
+ ->gotQueueFamilyPropertyCount &&
+ (queueFamilyIndex >=
+ pPhysicalDevice->numOfQueueFamilies)) {
+ skipCall |= LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ pPhysicalDevice, "VkPhysicalDevice",
+ queueFamilyIndex,
+ pPhysicalDevice->numOfQueueFamilies);
+ }
+ if (!pSupported) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ physicalDevice, "pSupported");
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->instance_dispatch_table
+ ->GetPhysicalDeviceSurfaceSupportKHR(
+ physicalDevice, queueFamilyIndex,
+ surface, pSupported);
+
+ if ((result == VK_SUCCESS) && pSupported &&
+ pPhysicalDevice) {
+ // Record the result of this query:
+ SwpInstance *pInstance =
+ pPhysicalDevice->pInstance;
+ SwpSurface *pSurface =
+ (pInstance) ? pInstance->surfaces[surface]
+ : NULL;
+ if (pSurface) {
+ pPhysicalDevice
+ ->supportedSurfaces[surface] = pSurface;
+ if (!pSurface->numQueueFamilyIndexSupport) {
+ if (pPhysicalDevice
+ ->gotQueueFamilyPropertyCount) {
+ pSurface->pQueueFamilyIndexSupport =
+ (VkBool32 *)malloc(
+ pPhysicalDevice
+ ->numOfQueueFamilies *
+ sizeof(VkBool32));
+ if (pSurface
+ ->pQueueFamilyIndexSupport !=
+ NULL) {
+ pSurface
+ ->numQueueFamilyIndexSupport =
+ pPhysicalDevice
+ ->numOfQueueFamilies;
+ }
+ }
+ }
+ if (pSurface->numQueueFamilyIndexSupport) {
+ pSurface->pQueueFamilyIndexSupport
+ [queueFamilyIndex] = *pSupported;
+ }
+ }
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+ VkPhysicalDevice physicalDevice,
+ VkSurfaceKHR surface,
+ VkSurfaceCapabilitiesKHR * pSurfaceCapabilities) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the surface extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->surfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
+ }
+ if (!pSurfaceCapabilities) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ physicalDevice, "pSurfaceCapabilities");
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceSurfaceCapabilitiesKHR(
+ physicalDevice, surface,
+ pSurfaceCapabilities);
+
+ if ((result == VK_SUCCESS) && pPhysicalDevice) {
+ // Record the result of this query:
+ pPhysicalDevice->gotSurfaceCapabilities = true;
+ // FIXME: NEED TO COPY THIS DATA, BECAUSE
+ // pSurfaceCapabilities POINTS TO APP-ALLOCATED
+ // DATA
+ pPhysicalDevice->surfaceCapabilities =
+ *pSurfaceCapabilities;
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
+
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPhysicalDeviceSurfaceFormatsKHR(
+ VkPhysicalDevice physicalDevice,
+ VkSurfaceKHR surface,
+ uint32_t * pSurfaceFormatCount,
+ VkSurfaceFormatKHR * pSurfaceFormats) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
+
+ // Validate that the surface extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->surfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
+ }
+ if (!pSurfaceFormatCount) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ physicalDevice, "pSurfaceFormatCount");
+ }
+
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceSurfaceFormatsKHR(
+ physicalDevice, surface,
+ pSurfaceFormatCount, pSurfaceFormats);
+
+ if ((result == VK_SUCCESS) && pPhysicalDevice &&
+ !pSurfaceFormats && pSurfaceFormatCount) {
+ // Record the result of this preliminary query:
+ pPhysicalDevice->surfaceFormatCount =
+ *pSurfaceFormatCount;
+ } else if ((result == VK_SUCCESS) &&
+ pPhysicalDevice && pSurfaceFormats &&
+ pSurfaceFormatCount) {
+ // Compare the preliminary value of
+ // *pSurfaceFormatCount with the
+ // value this time:
+ if (*pSurfaceFormatCount >
+ pPhysicalDevice->surfaceFormatCount) {
+ LOG_ERROR_INVALID_COUNT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ physicalDevice, "pSurfaceFormatCount",
+ "pSurfaceFormats", *pSurfaceFormatCount,
pPhysicalDevice->surfaceFormatCount);
- }
- else if (*pSurfaceFormatCount > 0) {
- // Record the result of this query:
- pPhysicalDevice->surfaceFormatCount = *pSurfaceFormatCount;
- pPhysicalDevice->pSurfaceFormats = (VkSurfaceFormatKHR *)
- malloc(*pSurfaceFormatCount * sizeof(VkSurfaceFormatKHR));
- if (pPhysicalDevice->pSurfaceFormats) {
- for (uint32_t i = 0 ; i < *pSurfaceFormatCount ; i++) {
- pPhysicalDevice->pSurfaceFormats[i] = pSurfaceFormats[i];
+ } else if (*pSurfaceFormatCount > 0) {
+ // Record the result of this query:
+ pPhysicalDevice->surfaceFormatCount =
+ *pSurfaceFormatCount;
+ pPhysicalDevice->pSurfaceFormats =
+ (VkSurfaceFormatKHR *)malloc(
+ *pSurfaceFormatCount *
+ sizeof(VkSurfaceFormatKHR));
+ if (pPhysicalDevice->pSurfaceFormats) {
+ for (uint32_t i = 0;
+ i < *pSurfaceFormatCount; i++) {
+ pPhysicalDevice
+ ->pSurfaceFormats[i] =
+ pSurfaceFormats[i];
+ }
+ } else {
+ pPhysicalDevice->surfaceFormatCount = 0;
+ }
+ }
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
}
- } else {
- pPhysicalDevice->surfaceFormatCount = 0;
- }
- }
- }
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetPhysicalDeviceSurfacePresentModesKHR(
+ VkPhysicalDevice physicalDevice,
+ VkSurfaceKHR surface, uint32_t * pPresentModeCount,
+ VkPresentModeKHR * pPresentModes) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(physicalDevice), layer_data_map);
+ SwpPhysicalDevice *pPhysicalDevice =
+ &my_data->physicalDeviceMap[physicalDevice];
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
- VkPhysicalDevice physicalDevice,
- VkSurfaceKHR surface,
- uint32_t* pPresentModeCount,
- VkPresentModeKHR* pPresentModes)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), layer_data_map);
- SwpPhysicalDevice *pPhysicalDevice = &my_data->physicalDeviceMap[physicalDevice];
+ // Validate that the surface extension was enabled:
+ if (pPhysicalDevice && pPhysicalDevice->pInstance &&
+ !pPhysicalDevice->pInstance
+ ->surfaceExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+ pPhysicalDevice->pInstance, "VkInstance",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkInstance.",
+ __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
+ }
+ if (!pPresentModeCount) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ physicalDevice, "pPresentModeCount");
+ }
- // Validate that the surface extension was enabled:
- if (pPhysicalDevice && pPhysicalDevice->pInstance &&
- !pPhysicalDevice->pInstance->surfaceExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
- pPhysicalDevice->pInstance,
- "VkInstance",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkInstance.",
- __FUNCTION__, VK_KHR_SURFACE_EXTENSION_NAME);
- }
- if (!pPresentModeCount) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- physicalDevice,
- "pPresentModeCount");
- }
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->instance_dispatch_table
+ ->GetPhysicalDeviceSurfacePresentModesKHR(
+ physicalDevice, surface,
+ pPresentModeCount, pPresentModes);
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->instance_dispatch_table->GetPhysicalDeviceSurfacePresentModesKHR(
- physicalDevice, surface, pPresentModeCount, pPresentModes);
-
- if ((result == VK_SUCCESS) && pPhysicalDevice && !pPresentModes &&
- pPresentModeCount) {
- // Record the result of this preliminary query:
- pPhysicalDevice->presentModeCount = *pPresentModeCount;
- }
- else if ((result == VK_SUCCESS) && pPhysicalDevice && pPresentModes &&
- pPresentModeCount) {
- // Compare the preliminary value of *pPresentModeCount with the
- // value this time:
- if (*pPresentModeCount > pPhysicalDevice->presentModeCount) {
- LOG_ERROR_INVALID_COUNT(VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- physicalDevice,
- "pPresentModeCount",
- "pPresentModes",
- *pPresentModeCount,
+ if ((result == VK_SUCCESS) && pPhysicalDevice &&
+ !pPresentModes && pPresentModeCount) {
+ // Record the result of this preliminary query:
+ pPhysicalDevice->presentModeCount =
+ *pPresentModeCount;
+ } else if ((result == VK_SUCCESS) &&
+ pPhysicalDevice && pPresentModes &&
+ pPresentModeCount) {
+ // Compare the preliminary value of
+ // *pPresentModeCount with the
+ // value this time:
+ if (*pPresentModeCount >
+ pPhysicalDevice->presentModeCount) {
+ LOG_ERROR_INVALID_COUNT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ physicalDevice, "pPresentModeCount",
+ "pPresentModes", *pPresentModeCount,
pPhysicalDevice->presentModeCount);
- }
- else if (*pPresentModeCount > 0) {
- // Record the result of this query:
- pPhysicalDevice->presentModeCount = *pPresentModeCount;
- pPhysicalDevice->pPresentModes = (VkPresentModeKHR *)
- malloc(*pPresentModeCount * sizeof(VkPresentModeKHR));
- if (pPhysicalDevice->pPresentModes) {
- for (uint32_t i = 0 ; i < *pPresentModeCount ; i++) {
- pPhysicalDevice->pPresentModes[i] = pPresentModes[i];
+ } else if (*pPresentModeCount > 0) {
+ // Record the result of this query:
+ pPhysicalDevice->presentModeCount =
+ *pPresentModeCount;
+ pPhysicalDevice->pPresentModes =
+ (VkPresentModeKHR *)malloc(
+ *pPresentModeCount *
+ sizeof(VkPresentModeKHR));
+ if (pPhysicalDevice->pPresentModes) {
+ for (uint32_t i = 0;
+ i < *pPresentModeCount; i++) {
+ pPhysicalDevice->pPresentModes[i] =
+ pPresentModes[i];
+ }
+ } else {
+ pPhysicalDevice->presentModeCount = 0;
+ }
+ }
+ }
+
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
}
- } else {
- pPhysicalDevice->presentModeCount = 0;
- }
- }
- }
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
+ // This function does the up-front validation work for
+ // vkCreateSwapchainKHR(),
+ // and returns VK_TRUE if a logging callback indicates that
+ // the call down the
+ // chain should be skipped:
+ static VkBool32 validateCreateSwapchainKHR(
+ VkDevice device,
+ const VkSwapchainCreateInfoKHR *pCreateInfo,
+ VkSwapchainKHR *pSwapchain) {
+ // TODO: Validate cases of re-creating a swapchain (the
+ // current code
+ // assumes a new swapchain is being created).
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(device), layer_data_map);
+ char fn[] = "vkCreateSwapchainKHR";
+ SwpDevice *pDevice = &my_data->deviceMap[device];
-// This function does the up-front validation work for vkCreateSwapchainKHR(),
-// and returns VK_TRUE if a logging callback indicates that the call down the
-// chain should be skipped:
-static VkBool32 validateCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- VkSwapchainKHR* pSwapchain)
-{
-// TODO: Validate cases of re-creating a swapchain (the current code
-// assumes a new swapchain is being created).
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- char fn[] = "vkCreateSwapchainKHR";
- SwpDevice *pDevice = &my_data->deviceMap[device];
+ // Validate that the swapchain extension was enabled:
+ if (pDevice && !pDevice->swapchainExtensionEnabled) {
+ return LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkDevice.",
+ fn, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+ }
+ if (!pCreateInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pCreateInfo");
+ } else {
+ if (pCreateInfo->sType !=
+ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo", "VK_STRUCTURE_TYPE_"
+ "SWAPCHAIN_CREATE_"
+ "INFO_KHR");
+ }
+ if (pCreateInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pCreateInfo");
+ }
+ }
+ if (!pSwapchain) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pSwapchain");
+ }
- // Validate that the swapchain extension was enabled:
- if (pDevice && !pDevice->swapchainExtensionEnabled) {
- return LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkDevice.",
- fn, VK_KHR_SWAPCHAIN_EXTENSION_NAME );
- }
- if (!pCreateInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- } else {
- if (pCreateInfo->sType != VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo",
- "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR");
- }
- if (pCreateInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pCreateInfo");
- }
- }
- if (!pSwapchain) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pSwapchain");
- }
+ // Keep around a useful pointer to pPhysicalDevice:
+ SwpPhysicalDevice *pPhysicalDevice =
+ pDevice->pPhysicalDevice;
- // Keep around a useful pointer to pPhysicalDevice:
- SwpPhysicalDevice *pPhysicalDevice = pDevice->pPhysicalDevice;
+ // Validate pCreateInfo values with the results of
+ // vkGetPhysicalDeviceSurfaceCapabilitiesKHR():
+ if (!pPhysicalDevice ||
+ !pPhysicalDevice->gotSurfaceCapabilities) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY,
+ "%s() called before calling "
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR().",
+ fn);
+ } else if (pCreateInfo) {
+ // Validate pCreateInfo->surface to make sure that
+ // vkGetPhysicalDeviceSurfaceSupportKHR() reported
+ // this as a supported
+ // surface:
+ SwpSurface *pSurface =
+ ((pPhysicalDevice)
+ ? pPhysicalDevice->supportedSurfaces
+ [pCreateInfo->surface]
+ : NULL);
+ if (!pSurface) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_UNSUPPORTED_SURFACE,
+ "%s() called with pCreateInfo->surface "
+ "that "
+ "was not returned by "
+ "vkGetPhysicalDeviceSurfaceSupportKHR() "
+ "for the device.",
+ fn);
+ }
- // Validate pCreateInfo values with the results of
- // vkGetPhysicalDeviceSurfaceCapabilitiesKHR():
- if (!pPhysicalDevice || !pPhysicalDevice->gotSurfaceCapabilities) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY,
- "%s() called before calling "
- "vkGetPhysicalDeviceSurfaceCapabilitiesKHR().",
- fn);
- } else if (pCreateInfo) {
- // Validate pCreateInfo->surface to make sure that
- // vkGetPhysicalDeviceSurfaceSupportKHR() reported this as a supported
- // surface:
- SwpSurface *pSurface =
- ((pPhysicalDevice) ?
- pPhysicalDevice->supportedSurfaces[pCreateInfo->surface] : NULL);
- if (!pSurface) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_UNSUPPORTED_SURFACE,
- "%s() called with pCreateInfo->surface that "
- "was not returned by "
- "vkGetPhysicalDeviceSurfaceSupportKHR() "
- "for the device.",
- fn);
- }
+ // Validate pCreateInfo->minImageCount against
+ // VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
+ VkSurfaceCapabilitiesKHR *pCapabilities =
+ &pPhysicalDevice->surfaceCapabilities;
+ if ((pCreateInfo->minImageCount <
+ pCapabilities->minImageCount) ||
+ ((pCapabilities->maxImageCount > 0) &&
+ (pCreateInfo->minImageCount >
+ pCapabilities->maxImageCount))) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_MIN_IMG_COUNT,
+ "%s() called with "
+ "pCreateInfo->minImageCount "
+ "= %d, which is outside the bounds "
+ "returned "
+ "by "
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR("
+ ") (i.e. "
+ "minImageCount = %d, maxImageCount = %d).",
+ fn, pCreateInfo->minImageCount,
+ pCapabilities->minImageCount,
+ pCapabilities->maxImageCount);
+ }
+ // Validate pCreateInfo->imageExtent against
+ // VkSurfaceCapabilitiesKHR::{current|min|max}ImageExtent:
+ if ((pCapabilities->currentExtent.width == -1) &&
+ ((pCreateInfo->imageExtent.width <
+ pCapabilities->minImageExtent.width) ||
+ (pCreateInfo->imageExtent.width >
+ pCapabilities->maxImageExtent.width) ||
+ (pCreateInfo->imageExtent.height <
+ pCapabilities->minImageExtent.height) ||
+ (pCreateInfo->imageExtent.height >
+ pCapabilities->maxImageExtent.height))) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_OUT_OF_BOUNDS_EXTENTS,
+ "%s() called with pCreateInfo->imageExtent "
+ "= "
+ "(%d,%d), which is outside the bounds "
+ "returned by "
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR("
+ "): "
+ "currentExtent = (%d,%d), minImageExtent = "
+ "(%d,%d), maxImageExtent = (%d,%d).",
+ fn, pCreateInfo->imageExtent.width,
+ pCreateInfo->imageExtent.height,
+ pCapabilities->currentExtent.width,
+ pCapabilities->currentExtent.height,
+ pCapabilities->minImageExtent.width,
+ pCapabilities->minImageExtent.height,
+ pCapabilities->maxImageExtent.width,
+ pCapabilities->maxImageExtent.height);
+ }
+ if ((pCapabilities->currentExtent.width != -1) &&
+ ((pCreateInfo->imageExtent.width !=
+ pCapabilities->currentExtent.width) ||
+ (pCreateInfo->imageExtent.height !=
+ pCapabilities->currentExtent.height))) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_EXTENTS_NO_MATCH_WIN,
+ "%s() called with pCreateInfo->imageExtent "
+ "= "
+ "(%d,%d), which is not equal to the "
+ "currentExtent = (%d,%d) returned by "
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR("
+ ").",
+ fn, pCreateInfo->imageExtent.width,
+ pCreateInfo->imageExtent.height,
+ pCapabilities->currentExtent.width,
+ pCapabilities->currentExtent.height);
+ }
+ // Validate pCreateInfo->preTransform has one bit
+ // set (1st two
+ // lines of if-statement), which bit is also set in
+ // VkSurfaceCapabilitiesKHR::supportedTransforms
+ // (3rd line of if-statement):
+ if (!pCreateInfo->preTransform ||
+ (pCreateInfo->preTransform &
+ (pCreateInfo->preTransform - 1)) ||
+ !(pCreateInfo->preTransform &
+ pCapabilities->supportedTransforms)) {
+ // This is an error situation; one for which
+ // we'd like to give
+ // the developer a helpful, multi-line error
+ // message. Build it
+ // up a little at a time, and then log it:
+ std::string errorString = "";
+ char str[1024];
+ // Here's the first part of the message:
+ sprintf(str,
+ "%s() called with a non-supported "
+ "pCreateInfo->preTransform (i.e. %s). "
+ "Supported values are:\n",
+ fn, surfaceTransformStr(
+ pCreateInfo->preTransform));
+ errorString += str;
+ for (int i = 0; i < 32; i++) {
+ // Build up the rest of the message:
+ if ((1 << i) &
+ pCapabilities->supportedTransforms) {
+ const char *newStr =
+ surfaceTransformStr(
+ (VkSurfaceTransformFlagBitsKHR)(
+ 1 << i));
+ sprintf(str, " %s\n", newStr);
+ errorString += str;
+ }
+ }
+ // Log the message that we've built up:
+ skipCall |= debug_report_log_msg(
+ my_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ (uint64_t)device, __LINE__,
+ SWAPCHAIN_CREATE_SWAP_BAD_PRE_TRANSFORM,
+ LAYER_NAME, errorString.c_str());
+ }
+ // Validate pCreateInfo->compositeAlpha has one bit
+ // set (1st two
+ // lines of if-statement), which bit is also set in
+ // VkSurfaceCapabilitiesKHR::supportedCompositeAlpha
+ // (3rd line of if-statement):
+ if (!pCreateInfo->compositeAlpha ||
+ (pCreateInfo->compositeAlpha &
+ (pCreateInfo->compositeAlpha - 1)) ||
+ !((pCreateInfo->compositeAlpha) &
+ pCapabilities->supportedCompositeAlpha)) {
+ // This is an error situation; one for which
+ // we'd like to give
+ // the developer a helpful, multi-line error
+ // message. Build it
+ // up a little at a time, and then log it:
+ std::string errorString = "";
+ char str[1024];
+ // Here's the first part of the message:
+ sprintf(
+ str,
+ "%s() called with a non-supported "
+ "pCreateInfo->compositeAlpha (i.e. %s). "
+ "Supported values are:\n",
+ fn, surfaceCompositeAlphaStr(
+ pCreateInfo->compositeAlpha));
+ errorString += str;
+ for (int i = 0; i < 32; i++) {
+ // Build up the rest of the message:
+ if ((1 << i) &
+ pCapabilities
+ ->supportedCompositeAlpha) {
+ const char *newStr =
+ surfaceCompositeAlphaStr(
+ (VkCompositeAlphaFlagBitsKHR)(
+ 1 << i));
+ sprintf(str, " %s\n", newStr);
+ errorString += str;
+ }
+ }
+ // Log the message that we've built up:
+ skipCall |= debug_report_log_msg(
+ my_data->report_data,
+ VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ (uint64_t)device, 0,
+ SWAPCHAIN_CREATE_SWAP_BAD_COMPOSITE_ALPHA,
+ LAYER_NAME, errorString.c_str());
+ }
+ // Validate pCreateInfo->imageArraySize against
+ // VkSurfaceCapabilitiesKHR::maxImageArraySize:
+ if ((pCreateInfo->imageArrayLayers < 1) ||
+ (pCreateInfo->imageArrayLayers >
+ pCapabilities->maxImageArrayLayers)) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_ARRAY_SIZE,
+ "%s() called with a non-supported "
+ "pCreateInfo->imageArraySize (i.e. %d). "
+ "Minimum value is 1, maximum value is %d.",
+ fn, pCreateInfo->imageArrayLayers,
+ pCapabilities->maxImageArrayLayers);
+ }
+ // Validate pCreateInfo->imageUsage against
+ // VkSurfaceCapabilitiesKHR::supportedUsageFlags:
+ if (pCreateInfo->imageUsage !=
+ (pCreateInfo->imageUsage &
+ pCapabilities->supportedUsageFlags)) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_USAGE_FLAGS,
+ "%s() called with a non-supported "
+ "pCreateInfo->imageUsage (i.e. 0x%08x)."
+ " Supported flag bits are 0x%08x.",
+ fn, pCreateInfo->imageUsage,
+ pCapabilities->supportedUsageFlags);
+ }
+ }
- // Validate pCreateInfo->minImageCount against
- // VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
- VkSurfaceCapabilitiesKHR *pCapabilities = &pPhysicalDevice->surfaceCapabilities;
- if ((pCreateInfo->minImageCount < pCapabilities->minImageCount) ||
- ((pCapabilities->maxImageCount > 0) &&
- (pCreateInfo->minImageCount > pCapabilities->maxImageCount))) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_MIN_IMG_COUNT,
- "%s() called with pCreateInfo->minImageCount "
- "= %d, which is outside the bounds returned "
- "by vkGetPhysicalDeviceSurfaceCapabilitiesKHR() (i.e. "
- "minImageCount = %d, maxImageCount = %d).",
- fn,
- pCreateInfo->minImageCount,
- pCapabilities->minImageCount,
- pCapabilities->maxImageCount);
- }
- // Validate pCreateInfo->imageExtent against
- // VkSurfaceCapabilitiesKHR::{current|min|max}ImageExtent:
- if ((pCapabilities->currentExtent.width == -1) &&
- ((pCreateInfo->imageExtent.width < pCapabilities->minImageExtent.width) ||
- (pCreateInfo->imageExtent.width > pCapabilities->maxImageExtent.width) ||
- (pCreateInfo->imageExtent.height < pCapabilities->minImageExtent.height) ||
- (pCreateInfo->imageExtent.height > pCapabilities->maxImageExtent.height))) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_OUT_OF_BOUNDS_EXTENTS,
- "%s() called with pCreateInfo->imageExtent = "
- "(%d,%d), which is outside the bounds "
- "returned by vkGetPhysicalDeviceSurfaceCapabilitiesKHR(): "
- "currentExtent = (%d,%d), minImageExtent = "
- "(%d,%d), maxImageExtent = (%d,%d).",
- fn,
- pCreateInfo->imageExtent.width,
- pCreateInfo->imageExtent.height,
- pCapabilities->currentExtent.width,
- pCapabilities->currentExtent.height,
- pCapabilities->minImageExtent.width,
- pCapabilities->minImageExtent.height,
- pCapabilities->maxImageExtent.width,
- pCapabilities->maxImageExtent.height);
- }
- if ((pCapabilities->currentExtent.width != -1) &&
- ((pCreateInfo->imageExtent.width != pCapabilities->currentExtent.width) ||
- (pCreateInfo->imageExtent.height != pCapabilities->currentExtent.height))) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_EXTENTS_NO_MATCH_WIN,
- "%s() called with pCreateInfo->imageExtent = "
- "(%d,%d), which is not equal to the "
- "currentExtent = (%d,%d) returned by "
- "vkGetPhysicalDeviceSurfaceCapabilitiesKHR().",
- fn,
- pCreateInfo->imageExtent.width,
- pCreateInfo->imageExtent.height,
- pCapabilities->currentExtent.width,
- pCapabilities->currentExtent.height);
- }
- // Validate pCreateInfo->preTransform has one bit set (1st two
- // lines of if-statement), which bit is also set in
- // VkSurfaceCapabilitiesKHR::supportedTransforms (3rd line of if-statement):
- if (!pCreateInfo->preTransform ||
- (pCreateInfo->preTransform & (pCreateInfo->preTransform - 1)) ||
- !(pCreateInfo->preTransform & pCapabilities->supportedTransforms)) {
- // This is an error situation; one for which we'd like to give
- // the developer a helpful, multi-line error message. Build it
- // up a little at a time, and then log it:
- std::string errorString = "";
- char str[1024];
- // Here's the first part of the message:
- sprintf(str, "%s() called with a non-supported "
- "pCreateInfo->preTransform (i.e. %s). "
- "Supported values are:\n",
- fn,
- surfaceTransformStr(pCreateInfo->preTransform));
- errorString += str;
- for (int i = 0; i < 32; i++) {
- // Build up the rest of the message:
- if ((1 << i) & pCapabilities->supportedTransforms) {
- const char *newStr =
- surfaceTransformStr((VkSurfaceTransformFlagBitsKHR) (1 << i));
- sprintf(str, " %s\n", newStr);
- errorString += str;
- }
- }
- // Log the message that we've built up:
- skipCall |= debug_report_log_msg(my_data->report_data,
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- (uint64_t) device, __LINE__,
- SWAPCHAIN_CREATE_SWAP_BAD_PRE_TRANSFORM,
- LAYER_NAME,
- errorString.c_str());
- }
- // Validate pCreateInfo->compositeAlpha has one bit set (1st two
- // lines of if-statement), which bit is also set in
- // VkSurfaceCapabilitiesKHR::supportedCompositeAlpha (3rd line of if-statement):
- if (!pCreateInfo->compositeAlpha ||
- (pCreateInfo->compositeAlpha & (pCreateInfo->compositeAlpha - 1)) ||
- !((pCreateInfo->compositeAlpha) & pCapabilities->supportedCompositeAlpha)) {
- // This is an error situation; one for which we'd like to give
- // the developer a helpful, multi-line error message. Build it
- // up a little at a time, and then log it:
- std::string errorString = "";
- char str[1024];
- // Here's the first part of the message:
- sprintf(str, "%s() called with a non-supported "
- "pCreateInfo->compositeAlpha (i.e. %s). "
- "Supported values are:\n",
- fn,
- surfaceCompositeAlphaStr(pCreateInfo->compositeAlpha));
- errorString += str;
- for (int i = 0; i < 32; i++) {
- // Build up the rest of the message:
- if ((1 << i) & pCapabilities->supportedCompositeAlpha) {
- const char *newStr =
- surfaceCompositeAlphaStr((VkCompositeAlphaFlagBitsKHR) (1 << i));
- sprintf(str, " %s\n", newStr);
- errorString += str;
- }
- }
- // Log the message that we've built up:
- skipCall |= debug_report_log_msg(my_data->report_data,
- VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- (uint64_t) device, 0,
- SWAPCHAIN_CREATE_SWAP_BAD_COMPOSITE_ALPHA,
- LAYER_NAME,
- errorString.c_str());
- }
- // Validate pCreateInfo->imageArraySize against
- // VkSurfaceCapabilitiesKHR::maxImageArraySize:
- if ((pCreateInfo->imageArrayLayers < 1) ||
- (pCreateInfo->imageArrayLayers > pCapabilities->maxImageArrayLayers)) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_ARRAY_SIZE,
- "%s() called with a non-supported "
- "pCreateInfo->imageArraySize (i.e. %d). "
- "Minimum value is 1, maximum value is %d.",
- fn,
- pCreateInfo->imageArrayLayers,
- pCapabilities->maxImageArrayLayers);
- }
- // Validate pCreateInfo->imageUsage against
- // VkSurfaceCapabilitiesKHR::supportedUsageFlags:
- if (pCreateInfo->imageUsage !=
- (pCreateInfo->imageUsage & pCapabilities->supportedUsageFlags)) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_USAGE_FLAGS,
- "%s() called with a non-supported "
- "pCreateInfo->imageUsage (i.e. 0x%08x)."
- " Supported flag bits are 0x%08x.",
- fn,
- pCreateInfo->imageUsage,
- pCapabilities->supportedUsageFlags);
- }
- }
+ // Validate pCreateInfo values with the results of
+ // vkGetPhysicalDeviceSurfaceFormatsKHR():
+ if (!pPhysicalDevice ||
+ !pPhysicalDevice->surfaceFormatCount) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY,
+ "%s() called before calling "
+ "vkGetPhysicalDeviceSurfaceFormatsKHR().",
+ fn);
+ } else if (pCreateInfo) {
+ // Validate pCreateInfo->imageFormat against
+ // VkSurfaceFormatKHR::format:
+ bool foundFormat = false;
+ bool foundColorSpace = false;
+ bool foundMatch = false;
+ for (uint32_t i = 0;
+ i < pPhysicalDevice->surfaceFormatCount; i++) {
+ if (pCreateInfo->imageFormat ==
+ pPhysicalDevice->pSurfaceFormats[i]
+ .format) {
+ // Validate pCreateInfo->imageColorSpace
+ // against
+ // VkSurfaceFormatKHR::colorSpace:
+ foundFormat = true;
+ if (pCreateInfo->imageColorSpace ==
+ pPhysicalDevice->pSurfaceFormats[i]
+ .colorSpace) {
+ foundMatch = true;
+ break;
+ }
+ } else {
+ if (pCreateInfo->imageColorSpace ==
+ pPhysicalDevice->pSurfaceFormats[i]
+ .colorSpace) {
+ foundColorSpace = true;
+ }
+ }
+ }
+ if (!foundMatch) {
+ if (!foundFormat) {
+ if (!foundColorSpace) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_FMT_CLR_SP,
+ "%s() called with neither a "
+ "supported "
+ "pCreateInfo->imageFormat "
+ "(i.e. %d) nor a supported "
+ "pCreateInfo->imageColorSpace "
+ "(i.e. %d).",
+ fn, pCreateInfo->imageFormat,
+ pCreateInfo->imageColorSpace);
+ } else {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_FORMAT,
+ "%s() called with a non-supported "
+ "pCreateInfo->imageFormat (i.e. "
+ "%d).",
+ fn, pCreateInfo->imageFormat);
+ }
+ } else if (!foundColorSpace) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_COLOR_SPACE,
+ "%s() called with a non-supported "
+ "pCreateInfo->imageColorSpace (i.e. "
+ "%d).",
+ fn, pCreateInfo->imageColorSpace);
+ }
+ }
+ }
- // Validate pCreateInfo values with the results of
- // vkGetPhysicalDeviceSurfaceFormatsKHR():
- if (!pPhysicalDevice || !pPhysicalDevice->surfaceFormatCount) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY,
- "%s() called before calling "
- "vkGetPhysicalDeviceSurfaceFormatsKHR().",
- fn);
- } else if (pCreateInfo) {
- // Validate pCreateInfo->imageFormat against
- // VkSurfaceFormatKHR::format:
- bool foundFormat = false;
- bool foundColorSpace = false;
- bool foundMatch = false;
- for (uint32_t i = 0 ; i < pPhysicalDevice->surfaceFormatCount ; i++) {
- if (pCreateInfo->imageFormat == pPhysicalDevice->pSurfaceFormats[i].format) {
- // Validate pCreateInfo->imageColorSpace against
- // VkSurfaceFormatKHR::colorSpace:
- foundFormat = true;
- if (pCreateInfo->imageColorSpace == pPhysicalDevice->pSurfaceFormats[i].colorSpace) {
- foundMatch = true;
- break;
- }
- } else {
- if (pCreateInfo->imageColorSpace == pPhysicalDevice->pSurfaceFormats[i].colorSpace) {
- foundColorSpace = true;
- }
- }
- }
- if (!foundMatch) {
- if (!foundFormat) {
- if (!foundColorSpace) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
- "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_FMT_CLR_SP,
- "%s() called with neither a "
- "supported pCreateInfo->imageFormat "
- "(i.e. %d) nor a supported "
- "pCreateInfo->imageColorSpace "
- "(i.e. %d).",
- fn,
- pCreateInfo->imageFormat,
- pCreateInfo->imageColorSpace);
- } else {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
- "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_FORMAT,
- "%s() called with a non-supported "
- "pCreateInfo->imageFormat (i.e. %d).",
- fn, pCreateInfo->imageFormat);
- }
- } else if (!foundColorSpace) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_COLOR_SPACE,
- "%s() called with a non-supported "
- "pCreateInfo->imageColorSpace (i.e. %d).",
- fn, pCreateInfo->imageColorSpace);
- }
- }
- }
+ // Validate pCreateInfo values with the results of
+ // vkGetPhysicalDeviceSurfacePresentModesKHR():
+ if (!pPhysicalDevice ||
+ !pPhysicalDevice->presentModeCount) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY,
+ "%s() called before calling "
+ "vkGetPhysicalDeviceSurfacePresentModesKHR().",
+ fn);
+ } else if (pCreateInfo) {
+ // Validate pCreateInfo->presentMode against
+ // vkGetPhysicalDeviceSurfacePresentModesKHR():
+ bool foundMatch = false;
+ for (uint32_t i = 0;
+ i < pPhysicalDevice->presentModeCount; i++) {
+ if (pPhysicalDevice->pPresentModes[i] ==
+ pCreateInfo->presentMode) {
+ foundMatch = true;
+ break;
+ }
+ }
+ if (!foundMatch) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_PRESENT_MODE,
+ "%s() called with a non-supported "
+ "pCreateInfo->presentMode (i.e. %s).",
+ fn,
+ presentModeStr(pCreateInfo->presentMode));
+ }
+ }
- // Validate pCreateInfo values with the results of
- // vkGetPhysicalDeviceSurfacePresentModesKHR():
- if (!pPhysicalDevice || !pPhysicalDevice->presentModeCount) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY,
- "%s() called before calling "
- "vkGetPhysicalDeviceSurfacePresentModesKHR().",
- fn);
- } else if (pCreateInfo) {
- // Validate pCreateInfo->presentMode against
- // vkGetPhysicalDeviceSurfacePresentModesKHR():
- bool foundMatch = false;
- for (uint32_t i = 0 ; i < pPhysicalDevice->presentModeCount ; i++) {
- if (pPhysicalDevice->pPresentModes[i] == pCreateInfo->presentMode) {
- foundMatch = true;
- break;
- }
- }
- if (!foundMatch) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_PRESENT_MODE,
- "%s() called with a non-supported "
- "pCreateInfo->presentMode (i.e. %s).",
- fn,
- presentModeStr(pCreateInfo->presentMode));
- }
- }
+ // Validate pCreateInfo->imageSharingMode and related
+ // values:
+ if (pCreateInfo->imageSharingMode ==
+ VK_SHARING_MODE_CONCURRENT) {
+ if ((pCreateInfo->queueFamilyIndexCount <= 1) ||
+ !pCreateInfo->pQueueFamilyIndices) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_SHARING_VALUES,
+ "%s() called with a supported "
+ "pCreateInfo->sharingMode of (i.e. %s),"
+ "but with a bad value(s) for "
+ "pCreateInfo->queueFamilyIndexCount or "
+ "pCreateInfo->pQueueFamilyIndices).",
+ fn, sharingModeStr(
+ pCreateInfo->imageSharingMode));
+ }
+ } else if (pCreateInfo->imageSharingMode !=
+ VK_SHARING_MODE_EXCLUSIVE) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_BAD_SHARING_MODE,
+ "%s() called with a non-supported "
+ "pCreateInfo->imageSharingMode (i.e. %s).",
+ fn,
+ sharingModeStr(pCreateInfo->imageSharingMode));
+ }
- // Validate pCreateInfo->imageSharingMode and related values:
- if (pCreateInfo->imageSharingMode == VK_SHARING_MODE_CONCURRENT) {
- if ((pCreateInfo->queueFamilyIndexCount <= 1) ||
- !pCreateInfo->pQueueFamilyIndices) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_SHARING_VALUES,
- "%s() called with a supported "
- "pCreateInfo->sharingMode of (i.e. %s),"
- "but with a bad value(s) for "
- "pCreateInfo->queueFamilyIndexCount or "
- "pCreateInfo->pQueueFamilyIndices).",
- fn,
- sharingModeStr(pCreateInfo->imageSharingMode));
- }
- } else if (pCreateInfo->imageSharingMode != VK_SHARING_MODE_EXCLUSIVE) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_BAD_SHARING_MODE,
- "%s() called with a non-supported "
- "pCreateInfo->imageSharingMode (i.e. %s).",
- fn,
- sharingModeStr(pCreateInfo->imageSharingMode));
- }
+ // Validate pCreateInfo->clipped:
+ if (pCreateInfo && (pCreateInfo->clipped != VK_FALSE) &&
+ (pCreateInfo->clipped != VK_TRUE)) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_BAD_BOOL,
+ "%s() called with a VkBool32 value that is "
+ "neither VK_TRUE nor VK_FALSE, but has the "
+ "numeric value of %d.",
+ fn, pCreateInfo->clipped);
+ }
- // Validate pCreateInfo->clipped:
- if (pCreateInfo &&
- (pCreateInfo->clipped != VK_FALSE) &&
- (pCreateInfo->clipped != VK_TRUE)) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device, "VkDevice",
- SWAPCHAIN_BAD_BOOL,
- "%s() called with a VkBool32 value that is "
- "neither VK_TRUE nor VK_FALSE, but has the "
- "numeric value of %d.",
- fn,
- pCreateInfo->clipped);
- }
+ // Validate pCreateInfo->oldSwapchain:
+ if (pCreateInfo && pCreateInfo->oldSwapchain) {
+ SwpSwapchain *pOldSwapchain =
+ &my_data
+ ->swapchainMap[pCreateInfo->oldSwapchain];
+ if (pOldSwapchain) {
+ if (device != pOldSwapchain->pDevice->device) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_DESTROY_SWAP_DIFF_DEVICE,
+ "%s() called with a different VkDevice "
+ "than the VkSwapchainKHR was created "
+ "with.",
+ __FUNCTION__);
+ }
+ if (pCreateInfo->surface !=
+ pOldSwapchain->pSurface->surface) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_CREATE_SWAP_DIFF_SURFACE,
+ "%s() called with "
+ "pCreateInfo->oldSwapchain "
+ "that has a different VkSurfaceKHR "
+ "than "
+ "pCreateInfo->surface.",
+ fn);
+ }
+ } else {
+ // TBD: Leave this in (not sure object_track
+ // will check this)?
+ skipCall |= LOG_ERROR_NON_VALID_OBJ(
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ pCreateInfo->oldSwapchain,
+ "VkSwapchainKHR");
+ }
+ }
- // Validate pCreateInfo->oldSwapchain:
- if (pCreateInfo && pCreateInfo->oldSwapchain) {
- SwpSwapchain *pOldSwapchain = &my_data->swapchainMap[pCreateInfo->oldSwapchain];
- if (pOldSwapchain) {
- if (device != pOldSwapchain->pDevice->device) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device, "VkDevice",
- SWAPCHAIN_DESTROY_SWAP_DIFF_DEVICE,
- "%s() called with a different VkDevice "
- "than the VkSwapchainKHR was created with.",
- __FUNCTION__);
- }
- if (pCreateInfo->surface != pOldSwapchain->pSurface->surface) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device, "VkDevice",
- SWAPCHAIN_CREATE_SWAP_DIFF_SURFACE,
- "%s() called with pCreateInfo->oldSwapchain "
- "that has a different VkSurfaceKHR than "
- "pCreateInfo->surface.",
- fn);
- }
- } else {
- // TBD: Leave this in (not sure object_track will check this)?
- skipCall |= LOG_ERROR_NON_VALID_OBJ(VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- pCreateInfo->oldSwapchain,
- "VkSwapchainKHR");
- }
- }
+ return skipCall;
+ }
- return skipCall;
-}
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateSwapchainKHR(
+ VkDevice device,
+ const VkSwapchainCreateInfoKHR *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkSwapchainKHR *pSwapchain) {
+ VkResult result = VK_SUCCESS;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(device), layer_data_map);
+ VkBool32 skipCall = validateCreateSwapchainKHR(
+ device, pCreateInfo, pSwapchain);
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(
- VkDevice device,
- const VkSwapchainCreateInfoKHR* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkSwapchainKHR* pSwapchain)
-{
- VkResult result = VK_SUCCESS;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkBool32 skipCall = validateCreateSwapchainKHR(device, pCreateInfo,
- pSwapchain);
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->device_dispatch_table
+ ->CreateSwapchainKHR(
+ device, pCreateInfo, pAllocator,
+ pSwapchain);
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->device_dispatch_table->CreateSwapchainKHR(
- device, pCreateInfo, pAllocator, pSwapchain);
+ if (result == VK_SUCCESS) {
+ // Remember the swapchain's handle, and link it
+ // to the device:
+ SwpDevice *pDevice =
+ &my_data->deviceMap[device];
- if (result == VK_SUCCESS) {
- // Remember the swapchain's handle, and link it to the device:
- SwpDevice *pDevice = &my_data->deviceMap[device];
+ my_data->swapchainMap[*pSwapchain].swapchain =
+ *pSwapchain;
+ if (pDevice) {
+ pDevice->swapchains[*pSwapchain] =
+ &my_data->swapchainMap[*pSwapchain];
+ }
+ my_data->swapchainMap[*pSwapchain].pDevice =
+ pDevice;
+ my_data->swapchainMap[*pSwapchain].imageCount =
+ 0;
+ my_data->swapchainMap[*pSwapchain]
+ .usedAllocatorToCreate =
+ (pAllocator != NULL);
+ // Store a pointer to the surface
+ SwpPhysicalDevice *pPhysicalDevice =
+ pDevice->pPhysicalDevice;
+ SwpInstance *pInstance =
+ (pPhysicalDevice)
+ ? pPhysicalDevice->pInstance
+ : NULL;
+ layer_data *my_instance_data =
+ ((pInstance) ? get_my_data_ptr(
+ get_dispatch_key(
+ pInstance->instance),
+ layer_data_map)
+ : NULL);
+ SwpSurface *pSurface =
+ ((my_data && pCreateInfo)
+ ? &my_instance_data->surfaceMap
+ [pCreateInfo->surface]
+ : NULL);
+ my_data->swapchainMap[*pSwapchain].pSurface =
+ pSurface;
+ if (pSurface) {
+ pSurface->swapchains[*pSwapchain] =
+ &my_data->swapchainMap[*pSwapchain];
+ }
+ }
- my_data->swapchainMap[*pSwapchain].swapchain = *pSwapchain;
- if (pDevice) {
- pDevice->swapchains[*pSwapchain] =
- &my_data->swapchainMap[*pSwapchain];
- }
- my_data->swapchainMap[*pSwapchain].pDevice = pDevice;
- my_data->swapchainMap[*pSwapchain].imageCount = 0;
- my_data->swapchainMap[*pSwapchain].usedAllocatorToCreate =
- (pAllocator != NULL);
- // Store a pointer to the surface
- SwpPhysicalDevice *pPhysicalDevice = pDevice->pPhysicalDevice;
- SwpInstance *pInstance =
- (pPhysicalDevice) ? pPhysicalDevice->pInstance : NULL;
- layer_data *my_instance_data =
- ((pInstance) ?
- get_my_data_ptr(get_dispatch_key(pInstance->instance), layer_data_map) :
- NULL);
- SwpSurface *pSurface =
- ((my_data && pCreateInfo) ?
- &my_instance_data->surfaceMap[pCreateInfo->surface] : NULL);
- my_data->swapchainMap[*pSwapchain].pSurface = pSurface;
- if (pSurface) {
- pSurface->swapchains[*pSwapchain] =
- &my_data->swapchainMap[*pSwapchain];
- }
- }
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroySwapchainKHR(
+ VkDevice device, VkSwapchainKHR swapchain,
+ const VkAllocationCallbacks *pAllocator) {
+ // TODOs:
+ //
+ // - Implement a check for validity language that reads:
+ // All uses of
+ // presentable images acquired from pname:swapchain
+ // and owned by the
+ // application must: have completed execution
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(device), layer_data_map);
+ SwpDevice *pDevice = &my_data->deviceMap[device];
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- const VkAllocationCallbacks* pAllocator)
-{
-// TODOs:
-//
-// - Implement a check for validity language that reads: All uses of
-// presentable images acquired from pname:swapchain and owned by the
-// application must: have completed execution
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- SwpDevice *pDevice = &my_data->deviceMap[device];
+ // Validate that the swapchain extension was enabled:
+ if (pDevice && !pDevice->swapchainExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkDevice.",
+ __FUNCTION__, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+ }
- // Validate that the swapchain extension was enabled:
- if (pDevice && !pDevice->swapchainExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkDevice.",
- __FUNCTION__, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- }
+ // Regardless of skipCall value, do some internal
+ // cleanup:
+ SwpSwapchain *pSwapchain =
+ &my_data->swapchainMap[swapchain];
+ if (pSwapchain) {
+ // Delete the SwpSwapchain associated with this
+ // swapchain:
+ if (pSwapchain->pDevice) {
+ pSwapchain->pDevice->swapchains.erase(
+ swapchain);
+ if (device != pSwapchain->pDevice->device) {
+ LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_DESTROY_SWAP_DIFF_DEVICE,
+ "%s() called with a different VkDevice "
+ "than the "
+ "VkSwapchainKHR was created with.",
+ __FUNCTION__);
+ }
+ }
+ if (pSwapchain->pSurface) {
+ pSwapchain->pSurface->swapchains.erase(
+ swapchain);
+ }
+ if (pSwapchain->imageCount) {
+ pSwapchain->images.clear();
+ }
+ if ((pAllocator != NULL) !=
+ pSwapchain->usedAllocatorToCreate) {
+ LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "VkDevice",
+ SWAPCHAIN_INCOMPATIBLE_ALLOCATOR,
+ "%s() called with incompatible pAllocator "
+ "from when "
+ "the object was created.",
+ __FUNCTION__);
+ }
+ my_data->swapchainMap.erase(swapchain);
+ }
- // Regardless of skipCall value, do some internal cleanup:
- SwpSwapchain *pSwapchain = &my_data->swapchainMap[swapchain];
- if (pSwapchain) {
- // Delete the SwpSwapchain associated with this swapchain:
- if (pSwapchain->pDevice) {
- pSwapchain->pDevice->swapchains.erase(swapchain);
- if (device != pSwapchain->pDevice->device) {
- LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_DESTROY_SWAP_DIFF_DEVICE,
- "%s() called with a different VkDevice than the "
- "VkSwapchainKHR was created with.",
- __FUNCTION__);
- }
- }
- if (pSwapchain->pSurface) {
- pSwapchain->pSurface->swapchains.erase(swapchain);
- }
- if (pSwapchain->imageCount) {
- pSwapchain->images.clear();
- }
- if ((pAllocator != NULL) != pSwapchain->usedAllocatorToCreate) {
- LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, instance, "VkInstance",
- SWAPCHAIN_INCOMPATIBLE_ALLOCATOR,
- "%s() called with incompatible pAllocator from when "
- "the object was created.",
- __FUNCTION__);
- }
- my_data->swapchainMap.erase(swapchain);
- }
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ my_data->device_dispatch_table->DestroySwapchainKHR(
+ device, swapchain, pAllocator);
+ }
+ }
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- my_data->device_dispatch_table->DestroySwapchainKHR(device, swapchain, pAllocator);
- }
-}
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkGetSwapchainImagesKHR(VkDevice device,
+ VkSwapchainKHR swapchain,
+ uint32_t * pSwapchainImageCount,
+ VkImage * pSwapchainImages) {
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(device), layer_data_map);
+ SwpDevice *pDevice = &my_data->deviceMap[device];
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint32_t* pSwapchainImageCount,
- VkImage* pSwapchainImages)
-{
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- SwpDevice *pDevice = &my_data->deviceMap[device];
+ // Validate that the swapchain extension was enabled:
+ if (pDevice && !pDevice->swapchainExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkDevice.",
+ __FUNCTION__, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+ }
+ SwpSwapchain *pSwapchain =
+ &my_data->swapchainMap[swapchain];
+ if (!pSwapchainImageCount) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pSwapchainImageCount");
+ }
- // Validate that the swapchain extension was enabled:
- if (pDevice && !pDevice->swapchainExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkDevice.",
- __FUNCTION__, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- }
- SwpSwapchain *pSwapchain = &my_data->swapchainMap[swapchain];
- if (!pSwapchainImageCount) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pSwapchainImageCount");
- }
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->device_dispatch_table
+ ->GetSwapchainImagesKHR(
+ device, swapchain, pSwapchainImageCount,
+ pSwapchainImages);
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->device_dispatch_table->GetSwapchainImagesKHR(
- device, swapchain, pSwapchainImageCount, pSwapchainImages);
-
- if ((result == VK_SUCCESS) && pSwapchain && !pSwapchainImages &&
- pSwapchainImageCount) {
- // Record the result of this preliminary query:
- pSwapchain->imageCount = *pSwapchainImageCount;
- }
- else if ((result == VK_SUCCESS) && pSwapchain && pSwapchainImages &&
- pSwapchainImageCount) {
- // Compare the preliminary value of *pSwapchainImageCount with the
- // value this time:
- if (*pSwapchainImageCount > pSwapchain->imageCount) {
- LOG_ERROR_INVALID_COUNT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pSwapchainImageCount",
+ if ((result == VK_SUCCESS) && pSwapchain &&
+ !pSwapchainImages && pSwapchainImageCount) {
+ // Record the result of this preliminary query:
+ pSwapchain->imageCount = *pSwapchainImageCount;
+ } else if ((result == VK_SUCCESS) && pSwapchain &&
+ pSwapchainImages &&
+ pSwapchainImageCount) {
+ // Compare the preliminary value of
+ // *pSwapchainImageCount with the
+ // value this time:
+ if (*pSwapchainImageCount >
+ pSwapchain->imageCount) {
+ LOG_ERROR_INVALID_COUNT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pSwapchainImageCount",
"pSwapchainImages",
*pSwapchainImageCount,
pSwapchain->imageCount);
- }
- else if (*pSwapchainImageCount > 0) {
- // Record the images and their state:
- pSwapchain->imageCount = *pSwapchainImageCount;
- for (uint32_t i = 0 ; i < *pSwapchainImageCount ; i++) {
- pSwapchain->images[i].image = pSwapchainImages[i];
- pSwapchain->images[i].pSwapchain = pSwapchain;
- pSwapchain->images[i].ownedByApp = false;
- }
- }
- }
+ } else if (*pSwapchainImageCount > 0) {
+ // Record the images and their state:
+ pSwapchain->imageCount =
+ *pSwapchainImageCount;
+ for (uint32_t i = 0;
+ i < *pSwapchainImageCount; i++) {
+ pSwapchain->images[i].image =
+ pSwapchainImages[i];
+ pSwapchain->images[i].pSwapchain =
+ pSwapchain;
+ pSwapchain->images[i].ownedByApp =
+ false;
+ }
+ }
+ }
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(
- VkDevice device,
- VkSwapchainKHR swapchain,
- uint64_t timeout,
- VkSemaphore semaphore,
- VkFence fence,
- uint32_t* pImageIndex)
-{
-// TODOs:
-//
-// - Address the timeout. Possibilities include looking at the state of the
-// swapchain's images, depending on the timeout value.
-// - Implement a check for validity language that reads: If pname:semaphore is
-// not sname:VK_NULL_HANDLE it must: be unsignalled
-// - Implement a check for validity language that reads: If pname:fence is not
-// sname:VK_NULL_HANDLE it must: be unsignalled and mustnot: be associated
-// with any other queue command that has not yet completed execution on that
-// queue
-// - Record/update the state of the swapchain, in case an error occurs
-// (e.g. VK_ERROR_OUT_OF_DATE_KHR).
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- SwpDevice *pDevice = &my_data->deviceMap[device];
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkAcquireNextImageKHR(
+ VkDevice device, VkSwapchainKHR swapchain,
+ uint64_t timeout, VkSemaphore semaphore,
+ VkFence fence, uint32_t * pImageIndex) {
+ // TODOs:
+ //
+ // - Address the timeout. Possibilities include looking
+ // at the state of the
+ // swapchain's images, depending on the timeout value.
+ // - Implement a check for validity language that reads:
+ // If pname:semaphore is
+ // not sname:VK_NULL_HANDLE it must: be unsignalled
+ // - Implement a check for validity language that reads:
+ // If pname:fence is not
+ // sname:VK_NULL_HANDLE it must: be unsignalled and
+ // mustnot: be associated
+ // with any other queue command that has not yet
+ // completed execution on that
+ // queue
+ // - Record/update the state of the swapchain, in case
+ // an error occurs
+ // (e.g. VK_ERROR_OUT_OF_DATE_KHR).
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(device), layer_data_map);
+ SwpDevice *pDevice = &my_data->deviceMap[device];
- // Validate that the swapchain extension was enabled:
- if (pDevice && !pDevice->swapchainExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device, "VkDevice",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkDevice.",
- __FUNCTION__, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- }
- SwpSwapchain *pSwapchain = &my_data->swapchainMap[swapchain];
- if (pSwapchain) {
- // Look to see if the application is trying to own too many images at
- // the same time (i.e. not leave any to display):
- uint32_t imagesOwnedByApp = 0;
- for (uint32_t i = 0 ; i < pSwapchain->imageCount ; i++) {
- if (pSwapchain->images[i].ownedByApp) {
- imagesOwnedByApp++;
- }
- }
- if (imagesOwnedByApp >= (pSwapchain->imageCount - 1)) {
- skipCall |= LOG_PERF_WARNING(VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- swapchain,
- "VkSwapchainKHR",
- SWAPCHAIN_APP_OWNS_TOO_MANY_IMAGES,
- "%s() called when the application "
- "already owns all presentable images "
- "in this swapchain except for the "
- "image currently being displayed. "
- "This call to %s() cannot succeed "
- "unless another thread calls the "
- "vkQueuePresentKHR() function in "
- "order to release ownership of one of "
- "the presentable images of this "
- "swapchain.",
- __FUNCTION__, __FUNCTION__);
- }
- }
- if (!pImageIndex) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pImageIndex");
- }
+ // Validate that the swapchain extension was enabled:
+ if (pDevice && !pDevice->swapchainExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "VkDevice", SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s extension was "
+ "not enabled for this VkDevice.",
+ __FUNCTION__, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+ }
+ SwpSwapchain *pSwapchain =
+ &my_data->swapchainMap[swapchain];
+ if (pSwapchain) {
+ // Look to see if the application is trying to own
+ // too many images at
+ // the same time (i.e. not leave any to display):
+ uint32_t imagesOwnedByApp = 0;
+ for (uint32_t i = 0; i < pSwapchain->imageCount;
+ i++) {
+ if (pSwapchain->images[i].ownedByApp) {
+ imagesOwnedByApp++;
+ }
+ }
+ if (imagesOwnedByApp >=
+ (pSwapchain->imageCount - 1)) {
+ skipCall |= LOG_PERF_WARNING(
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ swapchain, "VkSwapchainKHR",
+ SWAPCHAIN_APP_OWNS_TOO_MANY_IMAGES,
+ "%s() called when the application "
+ "already owns all presentable images "
+ "in this swapchain except for the "
+ "image currently being displayed. "
+ "This call to %s() cannot succeed "
+ "unless another thread calls the "
+ "vkQueuePresentKHR() function in "
+ "order to release ownership of one of "
+ "the presentable images of this "
+ "swapchain.",
+ __FUNCTION__, __FUNCTION__);
+ }
+ }
+ if (!pImageIndex) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pImageIndex");
+ }
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->device_dispatch_table->AcquireNextImageKHR(
- device, swapchain, timeout, semaphore, fence, pImageIndex);
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result = my_data->device_dispatch_table
+ ->AcquireNextImageKHR(
+ device, swapchain, timeout,
+ semaphore, fence, pImageIndex);
- if (((result == VK_SUCCESS) || (result == VK_SUBOPTIMAL_KHR)) &&
- pSwapchain) {
- // Change the state of the image (now owned by the application):
- pSwapchain->images[*pImageIndex].ownedByApp = true;
- }
+ if (((result == VK_SUCCESS) ||
+ (result == VK_SUBOPTIMAL_KHR)) &&
+ pSwapchain) {
+ // Change the state of the image (now owned by
+ // the application):
+ pSwapchain->images[*pImageIndex].ownedByApp =
+ true;
+ }
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(
- VkQueue queue,
- const VkPresentInfoKHR* pPresentInfo)
-{
-// TODOs:
-//
-// - Implement a check for validity language that reads: Any given element of
-// sname:VkSemaphore in pname:pWaitSemaphores must: refer to a prior signal
-// of that sname:VkSemaphore that won't be consumed by any other wait on that
-// semaphore
-// - Record/update the state of the swapchain, in case an error occurs
-// (e.g. VK_ERROR_OUT_OF_DATE_KHR).
- VkResult result = VK_SUCCESS;
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(queue), layer_data_map);
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkQueuePresentKHR(
+ VkQueue queue,
+ const VkPresentInfoKHR *pPresentInfo) {
+ // TODOs:
+ //
+ // - Implement a check for validity language that reads:
+ // Any given element of
+ // sname:VkSemaphore in pname:pWaitSemaphores must:
+ // refer to a prior signal
+ // of that sname:VkSemaphore that won't be consumed by
+ // any other wait on that
+ // semaphore
+ // - Record/update the state of the swapchain, in case
+ // an error occurs
+ // (e.g. VK_ERROR_OUT_OF_DATE_KHR).
+ VkResult result = VK_SUCCESS;
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(queue), layer_data_map);
- if (!pPresentInfo) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pPresentInfo");
- } else {
- if (pPresentInfo->sType != VK_STRUCTURE_TYPE_PRESENT_INFO_KHR) {
- skipCall |= LOG_ERROR_WRONG_STYPE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pPresentInfo",
- "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR");
- }
- if (pPresentInfo->pNext != NULL) {
- skipCall |= LOG_INFO_WRONG_NEXT(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pPresentInfo");
- }
- if (!pPresentInfo->swapchainCount) {
- skipCall |= LOG_ERROR_ZERO_VALUE(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pPresentInfo->swapchainCount");
- }
- if (!pPresentInfo->pSwapchains) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pPresentInfo->pSwapchains");
- }
- if (!pPresentInfo->pImageIndices) {
- skipCall |= LOG_ERROR_NULL_POINTER(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- device,
- "pPresentInfo->pImageIndices");
- }
- // Note: pPresentInfo->pResults is allowed to be NULL
- }
+ if (!pPresentInfo) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, device,
+ "pPresentInfo");
+ } else {
+ if (pPresentInfo->sType !=
+ VK_STRUCTURE_TYPE_PRESENT_INFO_KHR) {
+ skipCall |= LOG_ERROR_WRONG_STYPE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pPresentInfo",
+ "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR");
+ }
+ if (pPresentInfo->pNext != NULL) {
+ skipCall |= LOG_INFO_WRONG_NEXT(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pPresentInfo");
+ }
+ if (!pPresentInfo->swapchainCount) {
+ skipCall |= LOG_ERROR_ZERO_VALUE(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pPresentInfo->swapchainCount");
+ }
+ if (!pPresentInfo->pSwapchains) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pPresentInfo->pSwapchains");
+ }
+ if (!pPresentInfo->pImageIndices) {
+ skipCall |= LOG_ERROR_NULL_POINTER(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ device, "pPresentInfo->pImageIndices");
+ }
+ // Note: pPresentInfo->pResults is allowed to be
+ // NULL
+ }
- for (uint32_t i = 0;
- pPresentInfo && (i < pPresentInfo->swapchainCount);
- i++) {
- uint32_t swapchainCount = pPresentInfo->swapchainCount;
- uint32_t index = pPresentInfo->pImageIndices[i];
- SwpSwapchain *pSwapchain =
- &my_data->swapchainMap[pPresentInfo->pSwapchains[i]];
- if (pSwapchain) {
- if (!pSwapchain->pDevice->swapchainExtensionEnabled) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
- pSwapchain->pDevice, "VkDevice",
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
- "%s() called even though the %s extension was not enabled for this VkDevice.",
- __FUNCTION__, VK_KHR_SWAPCHAIN_EXTENSION_NAME);
- }
- if (index >= pSwapchain->imageCount) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- pPresentInfo->pSwapchains[i],
- "VkSwapchainKHR",
- SWAPCHAIN_INDEX_TOO_LARGE,
- "%s() called for an index that is too "
- "large (i.e. %d). There are only %d "
- "images in this VkSwapchainKHR.\n",
- __FUNCTION__, index,
- pSwapchain->imageCount);
- } else {
- if (!pSwapchain->images[index].ownedByApp) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- pPresentInfo->pSwapchains[i],
- "VkSwapchainKHR",
- SWAPCHAIN_INDEX_NOT_IN_USE,
- "%s() returned an index (i.e. %d) "
- "for an image that is not owned by "
- "the application.",
- __FUNCTION__, index);
- }
- }
- SwpQueue *pQueue = &my_data->queueMap[queue];
- SwpSurface *pSurface = pSwapchain->pSurface;
- if (pQueue && pSurface && pSurface->numQueueFamilyIndexSupport) {
- uint32_t queueFamilyIndex = pQueue->queueFamilyIndex;
- // Note: the 1st test is to ensure queueFamilyIndex is in range,
- // and the 2nd test is the validation check:
- if ((pSurface->numQueueFamilyIndexSupport > queueFamilyIndex) &&
- (!pSurface->pQueueFamilyIndexSupport[queueFamilyIndex])) {
- skipCall |= LOG_ERROR(VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- pPresentInfo->pSwapchains[i],
- "VkSwapchainKHR",
- SWAPCHAIN_SURFACE_NOT_SUPPORTED_WITH_QUEUE,
- "%s() called with a swapchain whose "
- "surface is not supported for "
- "presention on this device with the "
- "queueFamilyIndex (i.e. %d) of the "
- "given queue.",
- __FUNCTION__, queueFamilyIndex);
- }
- }
- }
- }
+ for (uint32_t i = 0;
+ pPresentInfo && (i < pPresentInfo->swapchainCount);
+ i++) {
+ uint32_t swapchainCount =
+ pPresentInfo->swapchainCount;
+ uint32_t index = pPresentInfo->pImageIndices[i];
+ SwpSwapchain *pSwapchain =
+ &my_data->swapchainMap[pPresentInfo
+ ->pSwapchains[i]];
+ if (pSwapchain) {
+ if (!pSwapchain->pDevice
+ ->swapchainExtensionEnabled) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+ pSwapchain->pDevice, "VkDevice",
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED,
+ "%s() called even though the %s "
+ "extension was not enabled for this "
+ "VkDevice.",
+ __FUNCTION__,
+ VK_KHR_SWAPCHAIN_EXTENSION_NAME);
+ }
+ if (index >= pSwapchain->imageCount) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ pPresentInfo->pSwapchains[i],
+ "VkSwapchainKHR",
+ SWAPCHAIN_INDEX_TOO_LARGE,
+ "%s() called for an index that is too "
+ "large (i.e. %d). There are only %d "
+ "images in this VkSwapchainKHR.\n",
+ __FUNCTION__, index,
+ pSwapchain->imageCount);
+ } else {
+ if (!pSwapchain->images[index].ownedByApp) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ pPresentInfo->pSwapchains[i],
+ "VkSwapchainKHR",
+ SWAPCHAIN_INDEX_NOT_IN_USE,
+ "%s() returned an index (i.e. %d) "
+ "for an image that is not owned by "
+ "the application.",
+ __FUNCTION__, index);
+ }
+ }
+ SwpQueue *pQueue = &my_data->queueMap[queue];
+ SwpSurface *pSurface = pSwapchain->pSurface;
+ if (pQueue && pSurface &&
+ pSurface->numQueueFamilyIndexSupport) {
+ uint32_t queueFamilyIndex =
+ pQueue->queueFamilyIndex;
+ // Note: the 1st test is to ensure
+ // queueFamilyIndex is in range,
+ // and the 2nd test is the validation check:
+ if ((pSurface->numQueueFamilyIndexSupport >
+ queueFamilyIndex) &&
+ (!pSurface->pQueueFamilyIndexSupport
+ [queueFamilyIndex])) {
+ skipCall |= LOG_ERROR(
+ VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ pPresentInfo->pSwapchains[i],
+ "VkSwapchainKHR",
+ SWAPCHAIN_SURFACE_NOT_SUPPORTED_WITH_QUEUE,
+ "%s() called with a swapchain "
+ "whose "
+ "surface is not supported for "
+ "presentation on this device with "
+ "the "
+ "queueFamilyIndex (i.e. %d) of the "
+ "given queue.",
+ __FUNCTION__, queueFamilyIndex);
+ }
+ }
+ }
+ }
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- result = my_data->device_dispatch_table->QueuePresentKHR(queue,
- pPresentInfo);
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ result =
+ my_data->device_dispatch_table->QueuePresentKHR(
+ queue, pPresentInfo);
- if (pPresentInfo &&
- ((result == VK_SUCCESS) || (result == VK_SUBOPTIMAL_KHR))) {
- for (uint32_t i = 0; i < pPresentInfo->swapchainCount ; i++) {
- int index = pPresentInfo->pImageIndices[i];
- SwpSwapchain *pSwapchain =
- &my_data->swapchainMap[pPresentInfo->pSwapchains[i]];
- if (pSwapchain) {
- // Change the state of the image (no longer owned by the
- // application):
- pSwapchain->images[index].ownedByApp = false;
- }
- }
- }
+ if (pPresentInfo &&
+ ((result == VK_SUCCESS) ||
+ (result == VK_SUBOPTIMAL_KHR))) {
+ for (uint32_t i = 0;
+ i < pPresentInfo->swapchainCount; i++) {
+ int index = pPresentInfo->pImageIndices[i];
+ SwpSwapchain *pSwapchain =
+ &my_data->swapchainMap
+ [pPresentInfo->pSwapchains[i]];
+ if (pSwapchain) {
+ // Change the state of the image (no
+ // longer owned by the
+ // application):
+ pSwapchain->images[index].ownedByApp =
+ false;
+ }
+ }
+ }
- return result;
- }
- return VK_ERROR_VALIDATION_FAILED_EXT;
-}
+ return result;
+ }
+ return VK_ERROR_VALIDATION_FAILED_EXT;
+ }
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
- VkDevice device,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- VkQueue* pQueue)
-{
- VkBool32 skipCall = VK_FALSE;
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
+ VkDevice device, uint32_t queueFamilyIndex,
+ uint32_t queueIndex, VkQueue * pQueue) {
+ VkBool32 skipCall = VK_FALSE;
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(device), layer_data_map);
- if (VK_FALSE == skipCall) {
- // Call down the call chain:
- my_data->device_dispatch_table->GetDeviceQueue(
- device, queueFamilyIndex, queueIndex, pQueue);
+ if (VK_FALSE == skipCall) {
+ // Call down the call chain:
+ my_data->device_dispatch_table->GetDeviceQueue(
+ device, queueFamilyIndex, queueIndex, pQueue);
- // Remember the queue's handle, and link it to the device:
- SwpDevice *pDevice = &my_data->deviceMap[device];
- my_data->queueMap[&pQueue].queue = *pQueue;
- if (pDevice) {
- pDevice->queues[*pQueue] = &my_data->queueMap[*pQueue];
- }
- my_data->queueMap[&pQueue].pDevice = pDevice;
- my_data->queueMap[&pQueue].queueFamilyIndex = queueFamilyIndex;
- }
-}
+ // Remember the queue's handle, and link it to the
+ // device:
+ SwpDevice *pDevice = &my_data->deviceMap[device];
+ my_data->queueMap[*pQueue].queue = *pQueue;
+ if (pDevice) {
+ pDevice->queues[*pQueue] =
+ &my_data->queueMap[*pQueue];
+ }
+ my_data->queueMap[*pQueue].pDevice = pDevice;
+ my_data->queueMap[*pQueue].queueFamilyIndex =
+ queueFamilyIndex;
+ }
+ }
+ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
+ vkCreateDebugReportCallbackEXT(
+ VkInstance instance,
+ const VkDebugReportCallbackCreateInfoEXT *
+ pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDebugReportCallbackEXT *pMsgCallback) {
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ VkResult result = my_data->instance_dispatch_table
+ ->CreateDebugReportCallbackEXT(
+ instance, pCreateInfo,
+ pAllocator, pMsgCallback);
+ if (VK_SUCCESS == result) {
+ result = layer_create_msg_callback(
+ my_data->report_data, pCreateInfo, pAllocator,
+ pMsgCallback);
+ }
+ return result;
+ }
-VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
- VkInstance instance,
- const VkDebugReportCallbackCreateInfoEXT* pCreateInfo,
- const VkAllocationCallbacks* pAllocator,
- VkDebugReportCallbackEXT* pMsgCallback)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- VkResult result = my_data->instance_dispatch_table->CreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pMsgCallback);
- if (VK_SUCCESS == result) {
- result = layer_create_msg_callback(my_data->report_data, pCreateInfo, pAllocator, pMsgCallback);
- }
- return result;
-}
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDestroyDebugReportCallbackEXT(
+ VkInstance instance,
+ VkDebugReportCallbackEXT msgCallback,
+ const VkAllocationCallbacks *pAllocator) {
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table
+ ->DestroyDebugReportCallbackEXT(
+ instance, msgCallback, pAllocator);
+ layer_destroy_msg_callback(my_data->report_data,
+ msgCallback, pAllocator);
+ }
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT msgCallback, const VkAllocationCallbacks *pAllocator)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DestroyDebugReportCallbackEXT(instance, msgCallback, pAllocator);
- layer_destroy_msg_callback(my_data->report_data, msgCallback, pAllocator);
-}
+ VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL
+ vkDebugReportMessageEXT(
+ VkInstance instance, VkDebugReportFlagsEXT flags,
+ VkDebugReportObjectTypeEXT objType, uint64_t object,
+ size_t location, int32_t msgCode,
+ const char *pLayerPrefix, const char *pMsg) {
+ layer_data *my_data = get_my_data_ptr(
+ get_dispatch_key(instance), layer_data_map);
+ my_data->instance_dispatch_table->DebugReportMessageEXT(
+ instance, flags, objType, object, location, msgCode,
+ pLayerPrefix, pMsg);
+ }
-VK_LAYER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
- VkInstance instance,
- VkDebugReportFlagsEXT flags,
- VkDebugReportObjectTypeEXT objType,
- uint64_t object,
- size_t location,
- int32_t msgCode,
- const char* pLayerPrefix,
- const char* pMsg)
-{
- layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- my_data->instance_dispatch_table->DebugReportMessageEXT(instance, flags, objType, object, location, msgCode, pLayerPrefix, pMsg);
-}
+ VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetDeviceProcAddr(VkDevice device,
+ const char *funcName) {
+ if (!strcmp("vkGetDeviceProcAddr", funcName))
+ return (PFN_vkVoidFunction)vkGetDeviceProcAddr;
+ if (!strcmp(funcName, "vkDestroyDevice"))
+ return (PFN_vkVoidFunction)vkDestroyDevice;
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char* funcName)
-{
- if (!strcmp("vkGetDeviceProcAddr", funcName))
- return (PFN_vkVoidFunction) vkGetDeviceProcAddr;
- if (!strcmp(funcName, "vkDestroyDevice"))
- return (PFN_vkVoidFunction) vkDestroyDevice;
+ if (device == VK_NULL_HANDLE) {
+ return NULL;
+ }
- if (device == VK_NULL_HANDLE) {
- return NULL;
- }
+ layer_data *my_data;
- layer_data *my_data;
+ my_data = get_my_data_ptr(get_dispatch_key(device),
+ layer_data_map);
+ VkLayerDispatchTable *pDisp =
+ my_data->device_dispatch_table;
+ if (my_data->deviceMap.size() != 0 &&
+ my_data->deviceMap[device]
+ .swapchainExtensionEnabled) {
+ if (!strcmp("vkCreateSwapchainKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkCreateSwapchainKHR);
+ if (!strcmp("vkDestroySwapchainKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkDestroySwapchainKHR);
+ if (!strcmp("vkGetSwapchainImagesKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetSwapchainImagesKHR);
+ if (!strcmp("vkAcquireNextImageKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkAcquireNextImageKHR);
+ if (!strcmp("vkQueuePresentKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkQueuePresentKHR);
+ }
+ if (!strcmp("vkGetDeviceQueue", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetDeviceQueue);
- my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkLayerDispatchTable *pDisp = my_data->device_dispatch_table;
- if (my_data->deviceMap.size() != 0 &&
- my_data->deviceMap[device].swapchainExtensionEnabled)
- {
- if (!strcmp("vkCreateSwapchainKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkCreateSwapchainKHR);
- if (!strcmp("vkDestroySwapchainKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkDestroySwapchainKHR);
- if (!strcmp("vkGetSwapchainImagesKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetSwapchainImagesKHR);
- if (!strcmp("vkAcquireNextImageKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkAcquireNextImageKHR);
- if (!strcmp("vkQueuePresentKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkQueuePresentKHR);
- }
- if (!strcmp("vkGetDeviceQueue", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetDeviceQueue);
+ if (pDisp->GetDeviceProcAddr == NULL)
+ return NULL;
+ return pDisp->GetDeviceProcAddr(device, funcName);
+ }
- if (pDisp->GetDeviceProcAddr == NULL)
- return NULL;
- return pDisp->GetDeviceProcAddr(device, funcName);
-}
+ VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
+ vkGetInstanceProcAddr(VkInstance instance,
+ const char *funcName) {
+ if (!strcmp("vkGetInstanceProcAddr", funcName))
+ return (PFN_vkVoidFunction)vkGetInstanceProcAddr;
+ if (!strcmp(funcName, "vkCreateInstance"))
+ return (PFN_vkVoidFunction)vkCreateInstance;
+ if (!strcmp(funcName, "vkDestroyInstance"))
+ return (PFN_vkVoidFunction)vkDestroyInstance;
+ if (!strcmp(funcName, "vkCreateDevice"))
+ return (PFN_vkVoidFunction)vkCreateDevice;
+ if (!strcmp(funcName, "vkEnumeratePhysicalDevices"))
+ return (
+ PFN_vkVoidFunction)vkEnumeratePhysicalDevices;
+ if (!strcmp(funcName,
+ "vkEnumerateInstanceLayerProperties"))
+ return (PFN_vkVoidFunction)
+ vkEnumerateInstanceLayerProperties;
+ if (!strcmp(funcName,
+ "vkEnumerateInstanceExtensionProperties"))
+ return (PFN_vkVoidFunction)
+ vkEnumerateInstanceExtensionProperties;
+ if (!strcmp(funcName,
+ "vkGetPhysicalDeviceQueueFamilyProperties"))
+ return (PFN_vkVoidFunction)
+ vkGetPhysicalDeviceQueueFamilyProperties;
-VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* funcName)
-{
- if (!strcmp("vkGetInstanceProcAddr", funcName))
- return (PFN_vkVoidFunction) vkGetInstanceProcAddr;
- if (!strcmp(funcName, "vkCreateInstance"))
- return (PFN_vkVoidFunction) vkCreateInstance;
- if (!strcmp(funcName, "vkDestroyInstance"))
- return (PFN_vkVoidFunction) vkDestroyInstance;
- if (!strcmp(funcName, "vkCreateDevice"))
- return (PFN_vkVoidFunction) vkCreateDevice;
- if (!strcmp(funcName, "vkEnumeratePhysicalDevices"))
- return (PFN_vkVoidFunction) vkEnumeratePhysicalDevices;
- if (!strcmp(funcName, "vkEnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceLayerProperties;
- if (!strcmp(funcName, "vkEnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction) vkEnumerateInstanceExtensionProperties;
- if (!strcmp(funcName, "vkGetPhysicalDeviceQueueFamilyProperties"))
- return (PFN_vkVoidFunction) vkGetPhysicalDeviceQueueFamilyProperties;
+ if (instance == VK_NULL_HANDLE) {
+ return NULL;
+ }
- if (instance == VK_NULL_HANDLE) {
- return NULL;
- }
+ PFN_vkVoidFunction addr;
- PFN_vkVoidFunction addr;
-
- layer_data *my_data;
- my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
- VkLayerInstanceDispatchTable* pTable = my_data->instance_dispatch_table;
- addr = debug_report_get_instance_proc_addr(my_data->report_data, funcName);
- if (addr) {
- return addr;
- }
+ layer_data *my_data;
+ my_data = get_my_data_ptr(get_dispatch_key(instance),
+ layer_data_map);
+ VkLayerInstanceDispatchTable *pTable =
+ my_data->instance_dispatch_table;
+ addr = debug_report_get_instance_proc_addr(
+ my_data->report_data, funcName);
+ if (addr) {
+ return addr;
+ }
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- if (my_data->instanceMap.size() != 0 &&
- my_data->instanceMap[instance].androidSurfaceExtensionEnabled)
- {
- if (!strcmp("vkCreateAndroidSurfaceKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkCreateAndroidSurfaceKHR);
- }
+ if (my_data->instanceMap.size() != 0 &&
+ my_data->instanceMap[instance]
+ .androidSurfaceExtensionEnabled) {
+ if (!strcmp("vkCreateAndroidSurfaceKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkCreateAndroidSurfaceKHR);
+ }
#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
- if (my_data->instanceMap.size() != 0 &&
- my_data->instanceMap[instance].mirSurfaceExtensionEnabled)
- {
- if (!strcmp("vkCreateMirSurfaceKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkCreateMirSurfaceKHR);
- if (!strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceMirPresentationSupportKHR);
- }
+ if (my_data->instanceMap.size() != 0 &&
+ my_data->instanceMap[instance]
+ .mirSurfaceExtensionEnabled) {
+ if (!strcmp("vkCreateMirSurfaceKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkCreateMirSurfaceKHR);
+ if (!strcmp("vkGetPhysicalDeviceMirPresentationSupp"
+ "ortKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceMirPresentationSupportKHR);
+ }
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- if (my_data->instanceMap.size() != 0 &&
- my_data->instanceMap[instance].waylandSurfaceExtensionEnabled)
- {
- if (!strcmp("vkCreateWaylandSurfaceKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkCreateWaylandSurfaceKHR);
- if (!strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceWaylandPresentationSupportKHR);
- }
+ if (my_data->instanceMap.size() != 0 &&
+ my_data->instanceMap[instance]
+ .waylandSurfaceExtensionEnabled) {
+ if (!strcmp("vkCreateWaylandSurfaceKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkCreateWaylandSurfaceKHR);
+ if (!strcmp("vkGetPhysicalDeviceWaylandPresentation"
+ "SupportKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceWaylandPresentationSupportKHR);
+ }
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
- if (my_data->instanceMap.size() != 0 &&
- my_data->instanceMap[instance].win32SurfaceExtensionEnabled)
- {
- if (!strcmp("vkCreateWin32SurfaceKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkCreateWin32SurfaceKHR);
- if (!strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceWin32PresentationSupportKHR);
- }
+ if (my_data->instanceMap.size() != 0 &&
+ my_data->instanceMap[instance]
+ .win32SurfaceExtensionEnabled) {
+ if (!strcmp("vkCreateWin32SurfaceKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkCreateWin32SurfaceKHR);
+ if (!strcmp("vkGetPhysicalDeviceWin32PresentationSu"
+ "pportKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceWin32PresentationSupportKHR);
+ }
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
- if (my_data->instanceMap.size() != 0 &&
- my_data->instanceMap[instance].xcbSurfaceExtensionEnabled)
- {
- if (!strcmp("vkCreateXcbSurfaceKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkCreateXcbSurfaceKHR);
- if (!strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceXcbPresentationSupportKHR);
- }
+ if (my_data->instanceMap.size() != 0 &&
+ my_data->instanceMap[instance]
+ .xcbSurfaceExtensionEnabled) {
+ if (!strcmp("vkCreateXcbSurfaceKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkCreateXcbSurfaceKHR);
+ if (!strcmp("vkGetPhysicalDeviceXcbPresentationSupp"
+ "ortKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceXcbPresentationSupportKHR);
+ }
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
- if (my_data->instanceMap.size() != 0 &&
- my_data->instanceMap[instance].xlibSurfaceExtensionEnabled)
- {
- if (!strcmp("vkCreateXlibSurfaceKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkCreateXlibSurfaceKHR);
- if (!strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceXlibPresentationSupportKHR);
- }
+ if (my_data->instanceMap.size() != 0 &&
+ my_data->instanceMap[instance]
+ .xlibSurfaceExtensionEnabled) {
+ if (!strcmp("vkCreateXlibSurfaceKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkCreateXlibSurfaceKHR);
+ if (!strcmp("vkGetPhysicalDeviceXlibPresentationSup"
+ "portKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceXlibPresentationSupportKHR);
+ }
#endif // VK_USE_PLATFORM_XLIB_KHR
- if (my_data->instanceMap.size() != 0 &&
- my_data->instanceMap[instance].surfaceExtensionEnabled)
- {
- if (!strcmp("vkDestroySurfaceKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkDestroySurfaceKHR);
- if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfaceSupportKHR);
- if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
- if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfaceFormatsKHR);
- if (!strcmp("vkGetPhysicalDeviceSurfacePresentModesKHR", funcName))
- return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceSurfacePresentModesKHR);
- }
+ if (my_data->instanceMap.size() != 0 &&
+ my_data->instanceMap[instance]
+ .surfaceExtensionEnabled) {
+ if (!strcmp("vkDestroySurfaceKHR", funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkDestroySurfaceKHR);
+ if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceSurfaceSupportKHR);
+ if (!strcmp(
+ "vkGetPhysicalDeviceSurfaceCapabilitiesKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
+ if (!strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceSurfaceFormatsKHR);
+ if (!strcmp(
+ "vkGetPhysicalDeviceSurfacePresentModesKHR",
+ funcName))
+ return reinterpret_cast<PFN_vkVoidFunction>(
+ vkGetPhysicalDeviceSurfacePresentModesKHR);
+ }
- if (pTable->GetInstanceProcAddr == NULL)
- return NULL;
- return pTable->GetInstanceProcAddr(instance, funcName);
-}
-
+ if (pTable->GetInstanceProcAddr == NULL)
+ return NULL;
+ return pTable->GetInstanceProcAddr(instance, funcName);
+ }
diff --git a/layers/swapchain.h b/layers/swapchain.h
index 92e9714..df4ff8c 100644
--- a/layers/swapchain.h
+++ b/layers/swapchain.h
@@ -40,133 +40,180 @@
#include <vector>
#include <unordered_map>
-static const VkLayerProperties globalLayerProps[] = {
- {
- "VK_LAYER_LUNARG_swapchain",
- VK_API_VERSION, // specVersion
- VK_MAKE_VERSION(0, 1, 0), // implementationVersion
- "layer: swapchain",
- }
-};
+static const VkLayerProperties globalLayerProps[] = {{
+ "VK_LAYER_LUNARG_swapchain",
+ VK_API_VERSION, // specVersion
+ VK_MAKE_VERSION(0, 1, 0), // implementationVersion
+ "layer: swapchain",
+}};
-static const VkLayerProperties deviceLayerProps[] = {
- {
- "VK_LAYER_LUNARG_swapchain",
- VK_API_VERSION, // specVersion
- VK_MAKE_VERSION(0, 1, 0), // implementationVersion
- "layer: swapchain",
- }
-};
-
+static const VkLayerProperties deviceLayerProps[] = {{
+ "VK_LAYER_LUNARG_swapchain",
+ VK_API_VERSION, // specVersion
+ VK_MAKE_VERSION(0, 1, 0), // implementationVersion
+ "layer: swapchain",
+}};
using namespace std;
-
// Swapchain ERROR codes
-typedef enum _SWAPCHAIN_ERROR
-{
- SWAPCHAIN_INVALID_HANDLE, // Handle used that isn't currently valid
- SWAPCHAIN_NULL_POINTER, // Pointer set to NULL, instead of being a valid pointer
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED, // Did not enable WSI extension, but called WSI function
- SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN, // Called vkDestroyDevice() before vkDestroySwapchainKHR()
- SWAPCHAIN_CREATE_UNSUPPORTED_SURFACE, // Called vkCreateSwapchainKHR() with a pCreateInfo->surface that wasn't seen as supported by vkGetPhysicalDeviceSurfaceSupportKHR for the device
- SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY, // Called vkCreateSwapchainKHR() without calling a query (e.g. vkGetPhysicalDeviceSurfaceCapabilitiesKHR())
- SWAPCHAIN_CREATE_SWAP_BAD_MIN_IMG_COUNT, // Called vkCreateSwapchainKHR() with out-of-bounds minImageCount
- SWAPCHAIN_CREATE_SWAP_OUT_OF_BOUNDS_EXTENTS,// Called vkCreateSwapchainKHR() with out-of-bounds imageExtent
- SWAPCHAIN_CREATE_SWAP_EXTENTS_NO_MATCH_WIN, // Called vkCreateSwapchainKHR() with imageExtent that doesn't match window's extent
- SWAPCHAIN_CREATE_SWAP_BAD_PRE_TRANSFORM, // Called vkCreateSwapchainKHR() with a non-supported preTransform
- SWAPCHAIN_CREATE_SWAP_BAD_COMPOSITE_ALPHA, // Called vkCreateSwapchainKHR() with a non-supported compositeAlpha
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_ARRAY_SIZE, // Called vkCreateSwapchainKHR() with a non-supported imageArraySize
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_USAGE_FLAGS, // Called vkCreateSwapchainKHR() with a non-supported imageUsageFlags
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_COLOR_SPACE, // Called vkCreateSwapchainKHR() with a non-supported imageColorSpace
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_FORMAT, // Called vkCreateSwapchainKHR() with a non-supported imageFormat
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_FMT_CLR_SP, // Called vkCreateSwapchainKHR() with a non-supported imageColorSpace
- SWAPCHAIN_CREATE_SWAP_BAD_PRESENT_MODE, // Called vkCreateSwapchainKHR() with a non-supported presentMode
- SWAPCHAIN_CREATE_SWAP_BAD_SHARING_MODE, // Called vkCreateSwapchainKHR() with a non-supported imageSharingMode
- SWAPCHAIN_CREATE_SWAP_BAD_SHARING_VALUES, // Called vkCreateSwapchainKHR() with bad values when imageSharingMode is VK_SHARING_MODE_CONCURRENT
- SWAPCHAIN_CREATE_SWAP_DIFF_SURFACE, // Called vkCreateSwapchainKHR() with pCreateInfo->oldSwapchain that has a different surface than pCreateInfo->surface
- SWAPCHAIN_DESTROY_SWAP_DIFF_DEVICE, // Called vkDestroySwapchainKHR() with a different VkDevice than vkCreateSwapchainKHR()
- SWAPCHAIN_APP_OWNS_TOO_MANY_IMAGES, // vkAcquireNextImageKHR() asked for more images than are available
- SWAPCHAIN_INDEX_TOO_LARGE, // Index is too large for swapchain
- SWAPCHAIN_INDEX_NOT_IN_USE, // vkQueuePresentKHR() given index that is not owned by app
- SWAPCHAIN_BAD_BOOL, // VkBool32 that doesn't have value of VK_TRUE or VK_FALSE (e.g. is a non-zero form of true)
- SWAPCHAIN_INVALID_COUNT, // Second time a query called, the pCount value didn't match first time
- SWAPCHAIN_WRONG_STYPE, // The sType for a struct has the wrong value
- SWAPCHAIN_WRONG_NEXT, // The pNext for a struct is not NULL
- SWAPCHAIN_ZERO_VALUE, // A value should be non-zero
- SWAPCHAIN_INCOMPATIBLE_ALLOCATOR, // pAllocator must be compatible (i.e. NULL or not) when object is created and destroyed
- SWAPCHAIN_DID_NOT_QUERY_QUEUE_FAMILIES, // A function using a queueFamilyIndex was called before vkGetPhysicalDeviceQueueFamilyProperties() was called
- SWAPCHAIN_QUEUE_FAMILY_INDEX_TOO_LARGE, // A queueFamilyIndex value is not less than pQueueFamilyPropertyCount returned by vkGetPhysicalDeviceQueueFamilyProperties()
- SWAPCHAIN_SURFACE_NOT_SUPPORTED_WITH_QUEUE, // A surface is not supported by a given queueFamilyIndex, as seen by vkGetPhysicalDeviceSurfaceSupportKHR()
+typedef enum _SWAPCHAIN_ERROR {
+ SWAPCHAIN_INVALID_HANDLE, // Handle used that isn't currently valid
+ SWAPCHAIN_NULL_POINTER, // Pointer set to NULL, instead of being a valid
+ // pointer
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED, // Did not enable WSI extension, but
+ // called WSI function
+ SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN, // Called vkDestroyDevice() before
+ // vkDestroySwapchainKHR()
+ SWAPCHAIN_CREATE_UNSUPPORTED_SURFACE, // Called vkCreateSwapchainKHR() with
+ // a pCreateInfo->surface that wasn't
+ // seen as supported by
+ // vkGetPhysicalDeviceSurfaceSupportKHR
+ // for the device
+ SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY, // Called vkCreateSwapchainKHR()
+ // without calling a query (e.g.
+ // vkGetPhysicalDeviceSurfaceCapabilitiesKHR())
+ SWAPCHAIN_CREATE_SWAP_BAD_MIN_IMG_COUNT, // Called vkCreateSwapchainKHR()
+ // with out-of-bounds minImageCount
+ SWAPCHAIN_CREATE_SWAP_OUT_OF_BOUNDS_EXTENTS, // Called
+ // vkCreateSwapchainKHR() with
+ // out-of-bounds imageExtent
+ SWAPCHAIN_CREATE_SWAP_EXTENTS_NO_MATCH_WIN, // Called vkCreateSwapchainKHR()
+ // with imageExtent that doesn't
+ // match window's extent
+ SWAPCHAIN_CREATE_SWAP_BAD_PRE_TRANSFORM, // Called vkCreateSwapchainKHR()
+ // with a non-supported
+ // preTransform
+ SWAPCHAIN_CREATE_SWAP_BAD_COMPOSITE_ALPHA, // Called vkCreateSwapchainKHR()
+ // with a non-supported
+ // compositeAlpha
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_ARRAY_SIZE, // Called vkCreateSwapchainKHR()
+ // with a non-supported
+ // imageArraySize
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_USAGE_FLAGS, // Called vkCreateSwapchainKHR()
+ // with a non-supported
+ // imageUsageFlags
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_COLOR_SPACE, // Called vkCreateSwapchainKHR()
+ // with a non-supported
+ // imageColorSpace
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_FORMAT, // Called vkCreateSwapchainKHR() with
+ // a non-supported imageFormat
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_FMT_CLR_SP, // Called vkCreateSwapchainKHR()
+ // with a non-supported
+ // imageColorSpace
+ SWAPCHAIN_CREATE_SWAP_BAD_PRESENT_MODE, // Called vkCreateSwapchainKHR()
+ // with a non-supported presentMode
+ SWAPCHAIN_CREATE_SWAP_BAD_SHARING_MODE, // Called vkCreateSwapchainKHR()
+ // with a non-supported
+ // imageSharingMode
+ SWAPCHAIN_CREATE_SWAP_BAD_SHARING_VALUES, // Called vkCreateSwapchainKHR()
+ // with bad values when
+ // imageSharingMode is
+ // VK_SHARING_MODE_CONCURRENT
+ SWAPCHAIN_CREATE_SWAP_DIFF_SURFACE, // Called vkCreateSwapchainKHR() with
+ // pCreateInfo->oldSwapchain that has a
+ // different surface than
+ // pCreateInfo->surface
+ SWAPCHAIN_DESTROY_SWAP_DIFF_DEVICE, // Called vkDestroySwapchainKHR() with a
+ // different VkDevice than
+ // vkCreateSwapchainKHR()
+ SWAPCHAIN_APP_OWNS_TOO_MANY_IMAGES, // vkAcquireNextImageKHR() asked for
+ // more images than are available
+ SWAPCHAIN_INDEX_TOO_LARGE, // Index is too large for swapchain
+ SWAPCHAIN_INDEX_NOT_IN_USE, // vkQueuePresentKHR() given index that is not
+ // owned by app
+ SWAPCHAIN_BAD_BOOL, // VkBool32 that doesn't have value of VK_TRUE or
+ // VK_FALSE (e.g. is a non-zero form of true)
+ SWAPCHAIN_INVALID_COUNT, // Second time a query called, the pCount value
+ // didn't match first time
+ SWAPCHAIN_WRONG_STYPE, // The sType for a struct has the wrong value
+ SWAPCHAIN_WRONG_NEXT, // The pNext for a struct is not NULL
+ SWAPCHAIN_ZERO_VALUE, // A value should be non-zero
+ SWAPCHAIN_INCOMPATIBLE_ALLOCATOR, // pAllocator must be compatible (i.e.
+ // NULL or not) when object is created and
+ // destroyed
+ SWAPCHAIN_DID_NOT_QUERY_QUEUE_FAMILIES, // A function using a
+ // queueFamilyIndex was called
+ // before
+ // vkGetPhysicalDeviceQueueFamilyProperties()
+ // was called
+ SWAPCHAIN_QUEUE_FAMILY_INDEX_TOO_LARGE, // A queueFamilyIndex value is not
+ // less than
+ // pQueueFamilyPropertyCount
+ // returned by
+ // vkGetPhysicalDeviceQueueFamilyProperties()
+ SWAPCHAIN_SURFACE_NOT_SUPPORTED_WITH_QUEUE, // A surface is not supported by
+ // a given queueFamilyIndex, as
+ // seen by
+ // vkGetPhysicalDeviceSurfaceSupportKHR()
} SWAPCHAIN_ERROR;
-
// The following is for logging error messages:
#define LAYER_NAME (char *) "Swapchain"
-#define LOG_ERROR_NON_VALID_OBJ(objType, type, obj) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (objType), \
- (uint64_t) (obj), __LINE__, SWAPCHAIN_INVALID_HANDLE, LAYER_NAME, \
- "%s() called with a non-valid %s.", __FUNCTION__, (obj)) \
- : VK_FALSE
-#define LOG_ERROR_NULL_POINTER(objType, type, obj) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (objType), \
- (uint64_t) (obj), 0, SWAPCHAIN_NULL_POINTER, LAYER_NAME, \
- "%s() called with NULL pointer %s.", __FUNCTION__, (obj)) \
- : VK_FALSE
-#define LOG_ERROR_INVALID_COUNT(objType, type, obj, obj2, val, val2) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (objType), \
- (uint64_t) (obj), 0, SWAPCHAIN_INVALID_COUNT, LAYER_NAME, \
- "%s() called with non-NULL %s, and with %s set to a " \
- "value (%d) that is greater than the value (%d) that " \
- "was returned when %s was NULL.", \
- __FUNCTION__, (obj2), (obj), (val), (val2), (obj2)) \
- : VK_FALSE
-#define LOG_ERROR_WRONG_STYPE(objType, type, obj, val) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (objType), \
- (uint64_t) (obj), 0, SWAPCHAIN_WRONG_STYPE, LAYER_NAME, \
- "%s() called with the wrong value for %s->sType " \
- "(expected %s).", \
- __FUNCTION__, (obj), (val)) \
- : VK_FALSE
-#define LOG_ERROR_ZERO_VALUE(objType, type, obj) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (objType), \
- (uint64_t) (obj), 0, SWAPCHAIN_ZERO_VALUE, LAYER_NAME, \
- "%s() called with a zero value for %s.", \
- __FUNCTION__, (obj)) \
- : VK_FALSE
-#define LOG_ERROR(objType, type, obj, enm, fmt, ...) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (objType), \
- (uint64_t) (obj), __LINE__, (enm), LAYER_NAME, (fmt), __VA_ARGS__) \
- : VK_FALSE
+#define LOG_ERROR_NON_VALID_OBJ(objType, type, obj) \
+ (my_data) \
+ ? log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, \
+ (objType), (uint64_t)(obj), __LINE__, \
+ SWAPCHAIN_INVALID_HANDLE, LAYER_NAME, \
+ "%s() called with a non-valid %s.", __FUNCTION__, (obj)) \
+ : VK_FALSE
+#define LOG_ERROR_NULL_POINTER(objType, type, obj) \
+ (my_data) ? log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, \
+ (objType), (uint64_t)(obj), 0, SWAPCHAIN_NULL_POINTER, \
+ LAYER_NAME, "%s() called with NULL pointer %s.", \
+ __FUNCTION__, (obj)) \
+ : VK_FALSE
+#define LOG_ERROR_INVALID_COUNT(objType, type, obj, obj2, val, val2) \
+ (my_data) ? log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, \
+ (objType), (uint64_t)(obj), 0, \
+ SWAPCHAIN_INVALID_COUNT, LAYER_NAME, \
+ "%s() called with non-NULL %s, and with %s set to a " \
+ "value (%d) that is greater than the value (%d) that " \
+ "was returned when %s was NULL.", \
+ __FUNCTION__, (obj2), (obj), (val), (val2), (obj2)) \
+ : VK_FALSE
+#define LOG_ERROR_WRONG_STYPE(objType, type, obj, val) \
+ (my_data) ? log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, \
+ (objType), (uint64_t)(obj), 0, SWAPCHAIN_WRONG_STYPE, \
+ LAYER_NAME, \
+ "%s() called with the wrong value for %s->sType " \
+ "(expected %s).", \
+ __FUNCTION__, (obj), (val)) \
+ : VK_FALSE
+#define LOG_ERROR_ZERO_VALUE(objType, type, obj) \
+ (my_data) ? log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, \
+ (objType), (uint64_t)(obj), 0, SWAPCHAIN_ZERO_VALUE, \
+ LAYER_NAME, "%s() called with a zero value for %s.", \
+ __FUNCTION__, (obj)) \
+ : VK_FALSE
+#define LOG_ERROR(objType, type, obj, enm, fmt, ...) \
+ (my_data) ? log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, \
+ (objType), (uint64_t)(obj), __LINE__, (enm), \
+ LAYER_NAME, (fmt), __VA_ARGS__) \
+ : VK_FALSE
#define LOG_ERROR_QUEUE_FAMILY_INDEX_TOO_LARGE(objType, type, obj, val1, val2) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (objType), \
- (uint64_t) (obj), 0, SWAPCHAIN_QUEUE_FAMILY_INDEX_TOO_LARGE, LAYER_NAME, \
- "%s() called with a queueFamilyIndex that is too " \
- "large (i.e. %d). The maximum value (returned " \
- "by vkGetPhysicalDeviceQueueFamilyProperties) is " \
- "only %d.\n", \
- __FUNCTION__, (val1), (val2)) \
- : VK_FALSE
-#define LOG_PERF_WARNING(objType, type, obj, enm, fmt, ...) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, (objType), \
- (uint64_t) (obj), __LINE__, (enm), LAYER_NAME, (fmt), __VA_ARGS__) \
- : VK_FALSE
-#define LOG_INFO_WRONG_NEXT(objType, type, obj) \
- (my_data) ? \
- log_msg(my_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, (objType), \
- (uint64_t) (obj), 0, SWAPCHAIN_WRONG_NEXT, LAYER_NAME, \
- "%s() called with non-NULL value for %s->pNext.", \
- __FUNCTION__, (obj)) \
- : VK_FALSE
-
+ (my_data) ? log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, \
+ (objType), (uint64_t)(obj), 0, \
+ SWAPCHAIN_QUEUE_FAMILY_INDEX_TOO_LARGE, LAYER_NAME, \
+ "%s() called with a queueFamilyIndex that is too " \
+ "large (i.e. %d). The maximum value (returned " \
+ "by vkGetPhysicalDeviceQueueFamilyProperties) is " \
+ "only %d.\n", \
+ __FUNCTION__, (val1), (val2)) \
+ : VK_FALSE
+#define LOG_PERF_WARNING(objType, type, obj, enm, fmt, ...) \
+ (my_data) \
+ ? log_msg(my_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, \
+ (objType), (uint64_t)(obj), __LINE__, (enm), LAYER_NAME, \
+ (fmt), __VA_ARGS__) \
+ : VK_FALSE
+#define LOG_INFO_WRONG_NEXT(objType, type, obj) \
+ (my_data) ? log_msg(my_data->report_data, VK_DEBUG_REPORT_INFORMATION_BIT_EXT, \
+ (objType), (uint64_t)(obj), 0, SWAPCHAIN_WRONG_NEXT, \
+ LAYER_NAME, \
+ "%s() called with non-NULL value for %s->pNext.", \
+ __FUNCTION__, (obj)) \
+ : VK_FALSE
// NOTE: The following struct's/typedef's are for keeping track of
// info that is used for validating the WSI extensions.
@@ -181,7 +228,8 @@
struct _SwpQueue;
typedef _SwpInstance SwpInstance;
-typedef _SwpSurface SwpSurface;;
+typedef _SwpSurface SwpSurface;
+;
typedef _SwpPhysicalDevice SwpPhysicalDevice;
typedef _SwpDevice SwpDevice;
typedef _SwpSwapchain SwpSwapchain;
@@ -194,42 +242,49 @@
VkInstance instance;
// Remember the VkSurfaceKHR's that are created for this VkInstance:
- unordered_map<VkSurfaceKHR, SwpSurface*> surfaces;
+ unordered_map<VkSurfaceKHR, SwpSurface *> surfaces;
// When vkEnumeratePhysicalDevices is called, the VkPhysicalDevice's are
// remembered:
- unordered_map<const void*, SwpPhysicalDevice*> physicalDevices;
+ unordered_map<const void *, SwpPhysicalDevice *> physicalDevices;
- // Set to true if VK_KHR_SURFACE_EXTENSION_NAME was enabled for this VkInstance:
+ // Set to true if VK_KHR_SURFACE_EXTENSION_NAME was enabled for this
+ // VkInstance:
bool surfaceExtensionEnabled;
- // TODO: Add additional booleans for platform-specific extensions:
+// TODO: Add additional booleans for platform-specific extensions:
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- // Set to true if VK_KHR_ANDROID_SURFACE_EXTENSION_NAME was enabled for this VkInstance:
+ // Set to true if VK_KHR_ANDROID_SURFACE_EXTENSION_NAME was enabled for this
+ // VkInstance:
bool androidSurfaceExtensionEnabled;
#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
- // Set to true if VK_KHR_MIR_SURFACE_EXTENSION_NAME was enabled for this VkInstance:
+ // Set to true if VK_KHR_MIR_SURFACE_EXTENSION_NAME was enabled for this
+ // VkInstance:
bool mirSurfaceExtensionEnabled;
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- // Set to true if VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME was enabled for this VkInstance:
+ // Set to true if VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME was enabled for this
+ // VkInstance:
bool waylandSurfaceExtensionEnabled;
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
- // Set to true if VK_KHR_WIN32_SURFACE_EXTENSION_NAME was enabled for this VkInstance:
+ // Set to true if VK_KHR_WIN32_SURFACE_EXTENSION_NAME was enabled for this
+ // VkInstance:
bool win32SurfaceExtensionEnabled;
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
- // Set to true if VK_KHR_XCB_SURFACE_EXTENSION_NAME was enabled for this VkInstance:
+ // Set to true if VK_KHR_XCB_SURFACE_EXTENSION_NAME was enabled for this
+ // VkInstance:
bool xcbSurfaceExtensionEnabled;
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
- // Set to true if VK_KHR_XLIB_SURFACE_EXTENSION_NAME was enabled for this VkInstance:
+ // Set to true if VK_KHR_XLIB_SURFACE_EXTENSION_NAME was enabled for this
+ // VkInstance:
bool xlibSurfaceExtensionEnabled;
#endif // VK_USE_PLATFORM_XLIB_KHR
};
-
+
// Create one of these for each VkSurfaceKHR:
struct _SwpSurface {
// The actual handle for this VkSurfaceKHR:
@@ -240,7 +295,7 @@
// When vkCreateSwapchainKHR is called, the VkSwapchainKHR's are
// remembered:
- unordered_map<VkSwapchainKHR, SwpSwapchain*> swapchains;
+ unordered_map<VkSwapchainKHR, SwpSwapchain *> swapchains;
// 'true' if pAllocator was non-NULL when vkCreate*SurfaceKHR was called:
bool usedAllocatorToCreate;
@@ -274,25 +329,27 @@
// Record all surfaces that vkGetPhysicalDeviceSurfaceSupportKHR() was
// called for:
- unordered_map<VkSurfaceKHR, SwpSurface*> supportedSurfaces;
+ unordered_map<VkSurfaceKHR, SwpSurface *> supportedSurfaces;
-// TODO: Record/use this info per-surface, not per-device, once a
-// non-dispatchable surface object is added to WSI:
+ // TODO: Record/use this info per-surface, not per-device, once a
+ // non-dispatchable surface object is added to WSI:
// Results of vkGetPhysicalDeviceSurfaceCapabilitiesKHR():
bool gotSurfaceCapabilities;
VkSurfaceCapabilitiesKHR surfaceCapabilities;
-// TODO: Record/use this info per-surface, not per-device, once a
-// non-dispatchable surface object is added to WSI:
- // Count and VkSurfaceFormatKHR's returned by vkGetPhysicalDeviceSurfaceFormatsKHR():
+ // TODO: Record/use this info per-surface, not per-device, once a
+ // non-dispatchable surface object is added to WSI:
+ // Count and VkSurfaceFormatKHR's returned by
+ // vkGetPhysicalDeviceSurfaceFormatsKHR():
uint32_t surfaceFormatCount;
- VkSurfaceFormatKHR* pSurfaceFormats;
+ VkSurfaceFormatKHR *pSurfaceFormats;
-// TODO: Record/use this info per-surface, not per-device, once a
-// non-dispatchable surface object is added to WSI:
- // Count and VkPresentModeKHR's returned by vkGetPhysicalDeviceSurfacePresentModesKHR():
+ // TODO: Record/use this info per-surface, not per-device, once a
+ // non-dispatchable surface object is added to WSI:
+ // Count and VkPresentModeKHR's returned by
+ // vkGetPhysicalDeviceSurfacePresentModesKHR():
uint32_t presentModeCount;
- VkPresentModeKHR* pPresentModes;
+ VkPresentModeKHR *pPresentModes;
};
// Create one of these for each VkDevice within a VkInstance:
@@ -308,10 +365,10 @@
// When vkCreateSwapchainKHR is called, the VkSwapchainKHR's are
// remembered:
- unordered_map<VkSwapchainKHR, SwpSwapchain*> swapchains;
+ unordered_map<VkSwapchainKHR, SwpSwapchain *> swapchains;
// When vkGetDeviceQueue is called, the VkQueue's are remembered:
- unordered_map<VkQueue, SwpQueue*> queues;
+ unordered_map<VkQueue, SwpQueue *> queues;
};
// Create one of these for each VkImage within a VkSwapchainKHR:
@@ -362,22 +419,20 @@
struct layer_data {
debug_report_data *report_data;
std::vector<VkDebugReportCallbackEXT> logging_callback;
- VkLayerDispatchTable* device_dispatch_table;
- VkLayerInstanceDispatchTable* instance_dispatch_table;
+ VkLayerDispatchTable *device_dispatch_table;
+ VkLayerInstanceDispatchTable *instance_dispatch_table;
// NOTE: The following are for keeping track of info that is used for
// validating the WSI extensions.
- std::unordered_map<void *, SwpInstance> instanceMap;
- std::unordered_map<VkSurfaceKHR, SwpSurface> surfaceMap;
+ std::unordered_map<void *, SwpInstance> instanceMap;
+ std::unordered_map<VkSurfaceKHR, SwpSurface> surfaceMap;
std::unordered_map<void *, SwpPhysicalDevice> physicalDeviceMap;
- std::unordered_map<void *, SwpDevice> deviceMap;
- std::unordered_map<VkSwapchainKHR, SwpSwapchain> swapchainMap;
- std::unordered_map<void *, SwpQueue> queueMap;
+ std::unordered_map<void *, SwpDevice> deviceMap;
+ std::unordered_map<VkSwapchainKHR, SwpSwapchain> swapchainMap;
+ std::unordered_map<void *, SwpQueue> queueMap;
- layer_data() :
- report_data(nullptr),
- device_dispatch_table(nullptr),
- instance_dispatch_table(nullptr)
- {};
+ layer_data()
+ : report_data(nullptr), device_dispatch_table(nullptr),
+ instance_dispatch_table(nullptr){};
};
#endif // SWAPCHAIN_H
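The WSI bookkeeping declared in swapchain.h above follows one pattern: each Vulkan object gets a Swp* record stored in a map keyed by its handle, and later intercepted calls consult that record to validate usage. A minimal standalone sketch of that pattern, assuming nothing beyond what the header shows (FakeDevice, DeviceRecord, and the function names are illustrative stand-ins, not identifiers from the layer):

    #include <cstdint>
    #include <unordered_map>

    // Illustrative stand-ins for the handles and records tracked above.
    using FakeDevice = void *;
    using FakeSwapchain = std::uint64_t;

    struct DeviceRecord {
        bool swapchainExtensionEnabled = false;                // analogue of SwpDevice state
        std::unordered_map<FakeSwapchain, int> swapchains;     // per-device child objects
    };

    static std::unordered_map<FakeDevice, DeviceRecord> g_deviceMap;

    // On create, the layer records the device and which extensions it enabled...
    void on_create_device(FakeDevice dev, bool wsiEnabled) {
        g_deviceMap[dev].swapchainExtensionEnabled = wsiEnabled;
    }

    // ...and on later calls it validates against that record.
    bool can_create_swapchain(FakeDevice dev) {
        auto it = g_deviceMap.find(dev);
        return it != g_deviceMap.end() && it->second.swapchainExtensionEnabled;
    }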
diff --git a/layers/threading.h b/layers/threading.h
index 32d2a9a..162de45 100644
--- a/layers/threading.h
+++ b/layers/threading.h
@@ -35,29 +35,26 @@
#include "vk_layer_logging.h"
// Draw State ERROR codes
-typedef enum _THREADING_CHECKER_ERROR
-{
- THREADING_CHECKER_NONE, // Used for INFO & other non-error messages
- THREADING_CHECKER_MULTIPLE_THREADS, // Object used simultaneously by multiple threads
- THREADING_CHECKER_SINGLE_THREAD_REUSE, // Object used simultaneously by recursion in single thread
+typedef enum _THREADING_CHECKER_ERROR {
+ THREADING_CHECKER_NONE, // Used for INFO & other non-error messages
+ THREADING_CHECKER_MULTIPLE_THREADS, // Object used simultaneously by
+ // multiple threads
+ THREADING_CHECKER_SINGLE_THREAD_REUSE, // Object used simultaneously by
+ // recursion in single thread
} THREADING_CHECKER_ERROR;
struct layer_data {
debug_report_data *report_data;
- VkDebugReportCallbackEXT logging_callback;
+ VkDebugReportCallbackEXT logging_callback;
- layer_data() :
- report_data(nullptr),
- logging_callback(VK_NULL_HANDLE)
- {};
+ layer_data() : report_data(nullptr), logging_callback(VK_NULL_HANDLE){};
};
-static std::unordered_map<void*, layer_data *> layer_data_map;
-static device_table_map threading_device_table_map;
-static instance_table_map threading_instance_table_map;
+static std::unordered_map<void *, layer_data *> layer_data_map;
+static device_table_map threading_device_table_map;
+static instance_table_map threading_instance_table_map;
-static inline debug_report_data *mdd(const void* object)
-{
+static inline debug_report_data *mdd(const void *object) {
dispatch_key key = get_dispatch_key(object);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
return my_data->report_data;
diff --git a/layers/unique_objects.h b/layers/unique_objects.h
index 5b3d5d5..6e073ae 100644
--- a/layers/unique_objects.h
+++ b/layers/unique_objects.h
@@ -50,9 +50,7 @@
struct layer_data {
bool wsi_enabled;
- layer_data() :
- wsi_enabled(false)
- {};
+ layer_data() : wsi_enabled(false){};
};
struct instExts {
@@ -65,93 +63,126 @@
bool win32_enabled;
};
-static std::unordered_map<void*, struct instExts> instanceExtMap;
-static std::unordered_map<void*, layer_data *> layer_data_map;
-static device_table_map unique_objects_device_table_map;
-static instance_table_map unique_objects_instance_table_map;
-// Structure to wrap returned non-dispatchable objects to guarantee they have unique handles
+static std::unordered_map<void *, struct instExts> instanceExtMap;
+static std::unordered_map<void *, layer_data *> layer_data_map;
+static device_table_map unique_objects_device_table_map;
+static instance_table_map unique_objects_instance_table_map;
+// Structure to wrap returned non-dispatchable objects to guarantee they have
+// unique handles
// address of struct will be used as the unique handle
-struct VkUniqueObject
-{
+struct VkUniqueObject {
uint64_t actualObject;
};
// Handle CreateInstance
-static void createInstanceRegisterExtensions(const VkInstanceCreateInfo* pCreateInfo, VkInstance instance)
-{
+static void
+createInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo,
+ VkInstance instance) {
uint32_t i;
- VkLayerInstanceDispatchTable *pDisp = get_dispatch_table(unique_objects_instance_table_map, instance);
+ VkLayerInstanceDispatchTable *pDisp =
+ get_dispatch_table(unique_objects_instance_table_map, instance);
PFN_vkGetInstanceProcAddr gpa = pDisp->GetInstanceProcAddr;
- pDisp->GetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
- pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
- pDisp->GetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR) gpa(instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
- pDisp->GetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR) gpa(instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
+ pDisp->GetPhysicalDeviceSurfaceSupportKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceSupportKHR");
+ pDisp->GetPhysicalDeviceSurfaceCapabilitiesKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
+ pDisp->GetPhysicalDeviceSurfaceFormatsKHR =
+ (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfaceFormatsKHR");
+ pDisp->GetPhysicalDeviceSurfacePresentModesKHR =
+ (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)gpa(
+ instance, "vkGetPhysicalDeviceSurfacePresentModesKHR");
#ifdef VK_USE_PLATFORM_WIN32_KHR
- pDisp->CreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) gpa(instance, "vkCreateWin32SurfaceKHR");
- pDisp->GetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
+ pDisp->CreateWin32SurfaceKHR =
+ (PFN_vkCreateWin32SurfaceKHR)gpa(instance, "vkCreateWin32SurfaceKHR");
+ pDisp->GetPhysicalDeviceWin32PresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR");
#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
- pDisp->CreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR) gpa(instance, "vkCreateXcbSurfaceKHR");
- pDisp->GetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
+ pDisp->CreateXcbSurfaceKHR =
+ (PFN_vkCreateXcbSurfaceKHR)gpa(instance, "vkCreateXcbSurfaceKHR");
+ pDisp->GetPhysicalDeviceXcbPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
- pDisp->CreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR) gpa(instance, "vkCreateXlibSurfaceKHR");
- pDisp->GetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
+ pDisp->CreateXlibSurfaceKHR =
+ (PFN_vkCreateXlibSurfaceKHR)gpa(instance, "vkCreateXlibSurfaceKHR");
+ pDisp->GetPhysicalDeviceXlibPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR");
#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
- pDisp->CreateMirSurfaceKHR = (PFN_vkCreateMirSurfaceKHR) gpa(instance, "vkCreateMirSurfaceKHR");
- pDisp->GetPhysicalDeviceMirPresentationSupportKHR = (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
+ pDisp->CreateMirSurfaceKHR =
+ (PFN_vkCreateMirSurfaceKHR)gpa(instance, "vkCreateMirSurfaceKHR");
+ pDisp->GetPhysicalDeviceMirPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceMirPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceMirPresentationSupportKHR");
#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR) gpa(instance, "vkCreateWaylandSurfaceKHR");
- pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR) gpa(instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
+ pDisp->CreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR)gpa(
+ instance, "vkCreateWaylandSurfaceKHR");
+ pDisp->GetPhysicalDeviceWaylandPresentationSupportKHR =
+ (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)gpa(
+ instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR");
#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR) gpa(instance, "vkCreateAndroidSurfaceKHR");
+ pDisp->CreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR)gpa(
+ instance, "vkCreateAndroidSurfaceKHR");
#endif // VK_USE_PLATFORM_ANDROID_KHR
instanceExtMap[pDisp] = {};
for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].wsi_enabled = true;
#ifdef VK_USE_PLATFORM_XLIB_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_XLIB_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].xlib_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_XCB_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].xcb_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].wayland_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_MIR_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_MIR_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].mir_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_ANDROID_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].android_enabled = true;
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_WIN32_SURFACE_EXTENSION_NAME) == 0)
instanceExtMap[pDisp].win32_enabled = true;
#endif
}
}
-VkResult
-explicit_CreateInstance(
- const VkInstanceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkInstance *pInstance)
-{
- VkLayerInstanceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VkResult explicit_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkInstance *pInstance) {
+ VkLayerInstanceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkCreateInstance fpCreateInstance =
+ (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if (fpCreateInstance == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -164,7 +195,8 @@
return result;
}
- initInstanceTable(*pInstance, fpGetInstanceProcAddr, unique_objects_instance_table_map);
+ initInstanceTable(*pInstance, fpGetInstanceProcAddr,
+ unique_objects_instance_table_map);
createInstanceRegisterExtensions(pCreateInfo, *pInstance);
@@ -172,36 +204,46 @@
}
// Handle CreateDevice
-static void createDeviceRegisterExtensions(const VkDeviceCreateInfo* pCreateInfo, VkDevice device)
-{
- layer_data *my_device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- VkLayerDispatchTable *pDisp = get_dispatch_table(unique_objects_device_table_map, device);
+static void
+createDeviceRegisterExtensions(const VkDeviceCreateInfo *pCreateInfo,
+ VkDevice device) {
+ layer_data *my_device_data =
+ get_my_data_ptr(get_dispatch_key(device), layer_data_map);
+ VkLayerDispatchTable *pDisp =
+ get_dispatch_table(unique_objects_device_table_map, device);
PFN_vkGetDeviceProcAddr gpa = pDisp->GetDeviceProcAddr;
- pDisp->CreateSwapchainKHR = (PFN_vkCreateSwapchainKHR) gpa(device, "vkCreateSwapchainKHR");
- pDisp->DestroySwapchainKHR = (PFN_vkDestroySwapchainKHR) gpa(device, "vkDestroySwapchainKHR");
- pDisp->GetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR) gpa(device, "vkGetSwapchainImagesKHR");
- pDisp->AcquireNextImageKHR = (PFN_vkAcquireNextImageKHR) gpa(device, "vkAcquireNextImageKHR");
- pDisp->QueuePresentKHR = (PFN_vkQueuePresentKHR) gpa(device, "vkQueuePresentKHR");
+ pDisp->CreateSwapchainKHR =
+ (PFN_vkCreateSwapchainKHR)gpa(device, "vkCreateSwapchainKHR");
+ pDisp->DestroySwapchainKHR =
+ (PFN_vkDestroySwapchainKHR)gpa(device, "vkDestroySwapchainKHR");
+ pDisp->GetSwapchainImagesKHR =
+ (PFN_vkGetSwapchainImagesKHR)gpa(device, "vkGetSwapchainImagesKHR");
+ pDisp->AcquireNextImageKHR =
+ (PFN_vkAcquireNextImageKHR)gpa(device, "vkAcquireNextImageKHR");
+ pDisp->QueuePresentKHR =
+ (PFN_vkQueuePresentKHR)gpa(device, "vkQueuePresentKHR");
my_device_data->wsi_enabled = false;
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
- if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
+ if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
+ VK_KHR_SWAPCHAIN_EXTENSION_NAME) == 0)
my_device_data->wsi_enabled = true;
}
}
-VkResult
-explicit_CreateDevice(
- VkPhysicalDevice gpu,
- const VkDeviceCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *pAllocator,
- VkDevice *pDevice)
-{
- VkLayerDeviceCreateInfo *chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
+VkResult explicit_CreateDevice(VkPhysicalDevice gpu,
+ const VkDeviceCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDevice *pDevice) {
+ VkLayerDeviceCreateInfo *chain_info =
+ get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo);
- PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
- PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
- PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
+ PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
+ PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr =
+ chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
+ PFN_vkCreateDevice fpCreateDevice =
+ (PFN_vkCreateDevice)fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if (fpCreateDevice == NULL) {
return VK_ERROR_INITIALIZATION_FAILED;
}
@@ -215,54 +257,73 @@
}
// Setup layer's device dispatch table
- initDeviceTable(*pDevice, fpGetDeviceProcAddr, unique_objects_device_table_map);
+ initDeviceTable(*pDevice, fpGetDeviceProcAddr,
+ unique_objects_device_table_map);
createDeviceRegisterExtensions(pCreateInfo, *pDevice);
return result;
}
-VkResult explicit_QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
-{
-// UNWRAP USES:
-// 0 : fence,VkFence
+VkResult explicit_QueueSubmit(VkQueue queue, uint32_t submitCount,
+ const VkSubmitInfo *pSubmits, VkFence fence) {
+ // UNWRAP USES:
+ // 0 : fence,VkFence
if (VK_NULL_HANDLE != fence) {
- fence = (VkFence)((VkUniqueObject*)fence)->actualObject;
+ fence = (VkFence)((VkUniqueObject *)fence)->actualObject;
}
-// waitSemaphoreCount : pSubmits[submitCount]->pWaitSemaphores,VkSemaphore
+ // waitSemaphoreCount : pSubmits[submitCount]->pWaitSemaphores,VkSemaphore
std::vector<VkSemaphore> original_pWaitSemaphores = {};
-// signalSemaphoreCount : pSubmits[submitCount]->pSignalSemaphores,VkSemaphore
+ // signalSemaphoreCount :
+ // pSubmits[submitCount]->pSignalSemaphores,VkSemaphore
std::vector<VkSemaphore> original_pSignalSemaphores = {};
if (pSubmits) {
- for (uint32_t index0=0; index0<submitCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < submitCount; ++index0) {
if (pSubmits[index0].pWaitSemaphores) {
- for (uint32_t index1=0; index1<pSubmits[index0].waitSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pWaitSemaphores);
- original_pWaitSemaphores.push_back(pSubmits[index0].pWaitSemaphores[index1]);
- *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pSubmits[index0].pWaitSemaphores[index1])->actualObject;
+ for (uint32_t index1 = 0;
+ index1 < pSubmits[index0].waitSemaphoreCount; ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pSubmits[index0].pWaitSemaphores);
+ original_pWaitSemaphores.push_back(
+ pSubmits[index0].pWaitSemaphores[index1]);
+ *(ppSemaphore[index1]) =
+ (VkSemaphore)((VkUniqueObject *)
+ pSubmits[index0].pWaitSemaphores[index1])
+ ->actualObject;
}
}
if (pSubmits[index0].pSignalSemaphores) {
- for (uint32_t index1=0; index1<pSubmits[index0].signalSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pSignalSemaphores);
- original_pSignalSemaphores.push_back(pSubmits[index0].pSignalSemaphores[index1]);
- *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pSubmits[index0].pSignalSemaphores[index1])->actualObject;
+ for (uint32_t index1 = 0;
+ index1 < pSubmits[index0].signalSemaphoreCount; ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pSubmits[index0].pSignalSemaphores);
+ original_pSignalSemaphores.push_back(
+ pSubmits[index0].pSignalSemaphores[index1]);
+ *(ppSemaphore[index1]) =
+ (VkSemaphore)((VkUniqueObject *)pSubmits[index0]
+ .pSignalSemaphores[index1])
+ ->actualObject;
}
}
}
}
- VkResult result = get_dispatch_table(unique_objects_device_table_map, queue)->QueueSubmit(queue, submitCount, pSubmits, fence);
+ VkResult result = get_dispatch_table(unique_objects_device_table_map, queue)
+ ->QueueSubmit(queue, submitCount, pSubmits, fence);
if (pSubmits) {
- for (uint32_t index0=0; index0<submitCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < submitCount; ++index0) {
if (pSubmits[index0].pWaitSemaphores) {
- for (uint32_t index1=0; index1<pSubmits[index0].waitSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pWaitSemaphores);
+ for (uint32_t index1 = 0;
+ index1 < pSubmits[index0].waitSemaphoreCount; ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pSubmits[index0].pWaitSemaphores);
*(ppSemaphore[index1]) = original_pWaitSemaphores[index1];
}
}
if (pSubmits[index0].pSignalSemaphores) {
- for (uint32_t index1=0; index1<pSubmits[index0].signalSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pSubmits[index0].pSignalSemaphores);
+ for (uint32_t index1 = 0;
+ index1 < pSubmits[index0].signalSemaphoreCount; ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pSubmits[index0].pSignalSemaphores);
*(ppSemaphore[index1]) = original_pSignalSemaphores[index1];
}
}
@@ -271,10 +332,17 @@
return result;
}
-VkResult explicit_QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
-{
-// UNWRAP USES:
-// 0 : pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->buffer,VkBuffer, pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->pBinds[bindCount]->memory,VkDeviceMemory, pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->image,VkImage, pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->pBinds[bindCount]->memory,VkDeviceMemory, pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->image,VkImage, pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->pBinds[bindCount]->memory,VkDeviceMemory
+VkResult explicit_QueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
+ const VkBindSparseInfo *pBindInfo,
+ VkFence fence) {
+ // UNWRAP USES:
+ // 0 :
+ // pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->buffer,VkBuffer,
+ // pBindInfo[bindInfoCount]->pBufferBinds[bufferBindCount]->pBinds[bindCount]->memory,VkDeviceMemory,
+ // pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->image,VkImage,
+ // pBindInfo[bindInfoCount]->pImageOpaqueBinds[imageOpaqueBindCount]->pBinds[bindCount]->memory,VkDeviceMemory,
+ // pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->image,VkImage,
+ // pBindInfo[bindInfoCount]->pImageBinds[imageBindCount]->pBinds[bindCount]->memory,VkDeviceMemory
std::vector<VkBuffer> original_buffer = {};
std::vector<VkDeviceMemory> original_memory1 = {};
std::vector<VkImage> original_image1 = {};
@@ -284,93 +352,203 @@
std::vector<VkSemaphore> original_pWaitSemaphores = {};
std::vector<VkSemaphore> original_pSignalSemaphores = {};
if (pBindInfo) {
- for (uint32_t index0=0; index0<bindInfoCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
if (pBindInfo[index0].pBufferBinds) {
- for (uint32_t index1=0; index1<pBindInfo[index0].bufferBindCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].bufferBindCount; ++index1) {
if (pBindInfo[index0].pBufferBinds[index1].buffer) {
- VkBuffer* pBuffer = (VkBuffer*)&(pBindInfo[index0].pBufferBinds[index1].buffer);
- original_buffer.push_back(pBindInfo[index0].pBufferBinds[index1].buffer);
- *(pBuffer) = (VkBuffer)((VkUniqueObject*)pBindInfo[index0].pBufferBinds[index1].buffer)->actualObject;
+ VkBuffer *pBuffer = (VkBuffer *)&(
+ pBindInfo[index0].pBufferBinds[index1].buffer);
+ original_buffer.push_back(
+ pBindInfo[index0].pBufferBinds[index1].buffer);
+ *(pBuffer) =
+ (VkBuffer)((VkUniqueObject *)pBindInfo[index0]
+ .pBufferBinds[index1]
+ .buffer)->actualObject;
}
if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
- for (uint32_t index2=0; index2<pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
- VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
- original_memory1.push_back(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
- *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory)->actualObject;
+ for (uint32_t index2 = 0;
+ index2 < pBindInfo[index0]
+ .pBufferBinds[index1]
+ .bindCount;
+ ++index2) {
+ if (pBindInfo[index0]
+ .pBufferBinds[index1]
+ .pBinds[index2]
+ .memory) {
+ VkDeviceMemory *pDeviceMemory =
+ (VkDeviceMemory *)&(
+ pBindInfo[index0]
+ .pBufferBinds[index1]
+ .pBinds[index2]
+ .memory);
+ original_memory1.push_back(
+ pBindInfo[index0]
+ .pBufferBinds[index1]
+ .pBinds[index2]
+ .memory);
+ *(pDeviceMemory) =
+ (VkDeviceMemory)(
+ (VkUniqueObject *)pBindInfo[index0]
+ .pBufferBinds[index1]
+ .pBinds[index2]
+ .memory)->actualObject;
}
}
}
}
}
if (pBindInfo[index0].pImageOpaqueBinds) {
- for (uint32_t index1=0; index1<pBindInfo[index0].imageOpaqueBindCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].imageOpaqueBindCount;
+ ++index1) {
if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
- VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageOpaqueBinds[index1].image);
- original_image1.push_back(pBindInfo[index0].pImageOpaqueBinds[index1].image);
- *(pImage) = (VkImage)((VkUniqueObject*)pBindInfo[index0].pImageOpaqueBinds[index1].image)->actualObject;
+ VkImage *pImage = (VkImage *)&(
+ pBindInfo[index0].pImageOpaqueBinds[index1].image);
+ original_image1.push_back(
+ pBindInfo[index0].pImageOpaqueBinds[index1].image);
+ *(pImage) =
+ (VkImage)((VkUniqueObject *)pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .image)->actualObject;
}
if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
- for (uint32_t index2=0; index2<pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
- VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
- original_memory2.push_back(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
- *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory)->actualObject;
+ for (uint32_t index2 = 0;
+ index2 < pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .bindCount;
+ ++index2) {
+ if (pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .pBinds[index2]
+ .memory) {
+ VkDeviceMemory *pDeviceMemory =
+ (VkDeviceMemory *)&(
+ pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .pBinds[index2]
+ .memory);
+ original_memory2.push_back(
+ pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .pBinds[index2]
+ .memory);
+ *(pDeviceMemory) =
+ (VkDeviceMemory)(
+ (VkUniqueObject *)pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .pBinds[index2]
+ .memory)->actualObject;
}
}
}
}
}
if (pBindInfo[index0].pImageBinds) {
- for (uint32_t index1=0; index1<pBindInfo[index0].imageBindCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].imageBindCount; ++index1) {
if (pBindInfo[index0].pImageBinds[index1].image) {
- VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageBinds[index1].image);
- original_image2.push_back(pBindInfo[index0].pImageBinds[index1].image);
- *(pImage) = (VkImage)((VkUniqueObject*)pBindInfo[index0].pImageBinds[index1].image)->actualObject;
+ VkImage *pImage = (VkImage *)&(
+ pBindInfo[index0].pImageBinds[index1].image);
+ original_image2.push_back(
+ pBindInfo[index0].pImageBinds[index1].image);
+ *(pImage) =
+ (VkImage)((VkUniqueObject *)pBindInfo[index0]
+ .pImageBinds[index1]
+ .image)->actualObject;
}
if (pBindInfo[index0].pImageBinds[index1].pBinds) {
- for (uint32_t index2=0; index2<pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
- VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
- original_memory3.push_back(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
- *(pDeviceMemory) = (VkDeviceMemory)((VkUniqueObject*)pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory)->actualObject;
+ for (
+ uint32_t index2 = 0;
+ index2 <
+ pBindInfo[index0].pImageBinds[index1].bindCount;
+ ++index2) {
+ if (pBindInfo[index0]
+ .pImageBinds[index1]
+ .pBinds[index2]
+ .memory) {
+ VkDeviceMemory *pDeviceMemory =
+ (VkDeviceMemory *)&(pBindInfo[index0]
+ .pImageBinds[index1]
+ .pBinds[index2]
+ .memory);
+ original_memory3.push_back(
+ pBindInfo[index0]
+ .pImageBinds[index1]
+ .pBinds[index2]
+ .memory);
+ *(pDeviceMemory) =
+ (VkDeviceMemory)(
+ (VkUniqueObject *)pBindInfo[index0]
+ .pImageBinds[index1]
+ .pBinds[index2]
+ .memory)->actualObject;
}
}
}
}
}
if (pBindInfo[index0].pWaitSemaphores) {
- for (uint32_t index1=0; index1<pBindInfo[index0].waitSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pWaitSemaphores);
- original_pWaitSemaphores.push_back(pBindInfo[index0].pWaitSemaphores[index1]);
- *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pBindInfo[index0].pWaitSemaphores[index1])->actualObject;
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].waitSemaphoreCount; ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pBindInfo[index0].pWaitSemaphores);
+ original_pWaitSemaphores.push_back(
+ pBindInfo[index0].pWaitSemaphores[index1]);
+ *(ppSemaphore[index1]) =
+ (VkSemaphore)((VkUniqueObject *)
+ pBindInfo[index0].pWaitSemaphores[index1])
+ ->actualObject;
}
}
if (pBindInfo[index0].pSignalSemaphores) {
- for (uint32_t index1=0; index1<pBindInfo[index0].signalSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pSignalSemaphores);
- original_pSignalSemaphores.push_back(pBindInfo[index0].pSignalSemaphores[index1]);
- *(ppSemaphore[index1]) = (VkSemaphore)((VkUniqueObject*)pBindInfo[index0].pSignalSemaphores[index1])->actualObject;
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].signalSemaphoreCount;
+ ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pBindInfo[index0].pSignalSemaphores);
+ original_pSignalSemaphores.push_back(
+ pBindInfo[index0].pSignalSemaphores[index1]);
+ *(ppSemaphore[index1]) =
+ (VkSemaphore)((VkUniqueObject *)pBindInfo[index0]
+ .pSignalSemaphores[index1])
+ ->actualObject;
}
}
}
}
if (VK_NULL_HANDLE != fence) {
- fence = (VkFence)((VkUniqueObject*)fence)->actualObject;
+ fence = (VkFence)((VkUniqueObject *)fence)->actualObject;
}
- VkResult result = get_dispatch_table(unique_objects_device_table_map, queue)->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+ VkResult result =
+ get_dispatch_table(unique_objects_device_table_map, queue)
+ ->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
if (pBindInfo) {
- for (uint32_t index0=0; index0<bindInfoCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < bindInfoCount; ++index0) {
if (pBindInfo[index0].pBufferBinds) {
- for (uint32_t index1=0; index1<pBindInfo[index0].bufferBindCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].bufferBindCount; ++index1) {
if (pBindInfo[index0].pBufferBinds[index1].buffer) {
- VkBuffer* pBuffer = (VkBuffer*)&(pBindInfo[index0].pBufferBinds[index1].buffer);
+ VkBuffer *pBuffer = (VkBuffer *)&(
+ pBindInfo[index0].pBufferBinds[index1].buffer);
*(pBuffer) = original_buffer[index1];
}
if (pBindInfo[index0].pBufferBinds[index1].pBinds) {
- for (uint32_t index2=0; index2<pBindInfo[index0].pBufferBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory) {
- VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pBufferBinds[index1].pBinds[index2].memory);
+ for (uint32_t index2 = 0;
+ index2 < pBindInfo[index0]
+ .pBufferBinds[index1]
+ .bindCount;
+ ++index2) {
+ if (pBindInfo[index0]
+ .pBufferBinds[index1]
+ .pBinds[index2]
+ .memory) {
+ VkDeviceMemory *pDeviceMemory =
+ (VkDeviceMemory *)&(
+ pBindInfo[index0]
+ .pBufferBinds[index1]
+ .pBinds[index2]
+ .memory);
*(pDeviceMemory) = original_memory1[index2];
}
}
@@ -378,15 +556,30 @@
}
}
if (pBindInfo[index0].pImageOpaqueBinds) {
- for (uint32_t index1=0; index1<pBindInfo[index0].imageOpaqueBindCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].imageOpaqueBindCount;
+ ++index1) {
if (pBindInfo[index0].pImageOpaqueBinds[index1].image) {
- VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageOpaqueBinds[index1].image);
+ VkImage *pImage = (VkImage *)&(
+ pBindInfo[index0].pImageOpaqueBinds[index1].image);
*(pImage) = original_image1[index1];
}
if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds) {
- for (uint32_t index2=0; index2<pBindInfo[index0].pImageOpaqueBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory) {
- VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageOpaqueBinds[index1].pBinds[index2].memory);
+ for (uint32_t index2 = 0;
+ index2 < pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .bindCount;
+ ++index2) {
+ if (pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .pBinds[index2]
+ .memory) {
+ VkDeviceMemory *pDeviceMemory =
+ (VkDeviceMemory *)&(
+ pBindInfo[index0]
+ .pImageOpaqueBinds[index1]
+ .pBinds[index2]
+ .memory);
*(pDeviceMemory) = original_memory2[index2];
}
}
@@ -394,15 +587,28 @@
}
}
if (pBindInfo[index0].pImageBinds) {
- for (uint32_t index1=0; index1<pBindInfo[index0].imageBindCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].imageBindCount; ++index1) {
if (pBindInfo[index0].pImageBinds[index1].image) {
- VkImage* pImage = (VkImage*)&(pBindInfo[index0].pImageBinds[index1].image);
+ VkImage *pImage = (VkImage *)&(
+ pBindInfo[index0].pImageBinds[index1].image);
*(pImage) = original_image2[index1];
}
if (pBindInfo[index0].pImageBinds[index1].pBinds) {
- for (uint32_t index2=0; index2<pBindInfo[index0].pImageBinds[index1].bindCount; ++index2) {
- if (pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory) {
- VkDeviceMemory* pDeviceMemory = (VkDeviceMemory*)&(pBindInfo[index0].pImageBinds[index1].pBinds[index2].memory);
+ for (
+ uint32_t index2 = 0;
+ index2 <
+ pBindInfo[index0].pImageBinds[index1].bindCount;
+ ++index2) {
+ if (pBindInfo[index0]
+ .pImageBinds[index1]
+ .pBinds[index2]
+ .memory) {
+ VkDeviceMemory *pDeviceMemory =
+ (VkDeviceMemory *)&(pBindInfo[index0]
+ .pImageBinds[index1]
+ .pBinds[index2]
+ .memory);
*(pDeviceMemory) = original_memory3[index2];
}
}
@@ -410,14 +616,19 @@
}
}
if (pBindInfo[index0].pWaitSemaphores) {
- for (uint32_t index1=0; index1<pBindInfo[index0].waitSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pWaitSemaphores);
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].waitSemaphoreCount; ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pBindInfo[index0].pWaitSemaphores);
*(ppSemaphore[index1]) = original_pWaitSemaphores[index1];
}
}
if (pBindInfo[index0].pSignalSemaphores) {
- for (uint32_t index1=0; index1<pBindInfo[index0].signalSemaphoreCount; ++index1) {
- VkSemaphore** ppSemaphore = (VkSemaphore**)&(pBindInfo[index0].pSignalSemaphores);
+ for (uint32_t index1 = 0;
+ index1 < pBindInfo[index0].signalSemaphoreCount;
+ ++index1) {
+ VkSemaphore **ppSemaphore =
+ (VkSemaphore **)&(pBindInfo[index0].pSignalSemaphores);
*(ppSemaphore[index1]) = original_pSignalSemaphores[index1];
}
}
@@ -426,55 +637,79 @@
return result;
}
-VkResult explicit_CreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
-{
-// UNWRAP USES:
-// 0 : pipelineCache,VkPipelineCache, pCreateInfos[createInfoCount]->stage[0]->module,VkShaderModule, pCreateInfos[createInfoCount]->layout,VkPipelineLayout, pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
+VkResult explicit_CreateComputePipelines(
+ VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+ const VkComputePipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
+ // UNWRAP USES:
+ // 0 : pipelineCache,VkPipelineCache,
+ // pCreateInfos[createInfoCount]->stage[0]->module,VkShaderModule,
+ // pCreateInfos[createInfoCount]->layout,VkPipelineLayout,
+ // pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
if (VK_NULL_HANDLE != pipelineCache) {
- pipelineCache = (VkPipelineCache)((VkUniqueObject*)pipelineCache)->actualObject;
+ pipelineCache =
+ (VkPipelineCache)((VkUniqueObject *)pipelineCache)->actualObject;
}
std::vector<VkShaderModule> original_module = {};
std::vector<VkPipelineLayout> original_layout = {};
std::vector<VkPipeline> original_basePipelineHandle = {};
if (pCreateInfos) {
- for (uint32_t index0=0; index0<createInfoCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
if (pCreateInfos[index0].stage.module) {
- VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].stage.module);
+ VkShaderModule *pShaderModule =
+ (VkShaderModule *)&(pCreateInfos[index0].stage.module);
original_module.push_back(pCreateInfos[index0].stage.module);
- *(pShaderModule) = (VkShaderModule)((VkUniqueObject*)pCreateInfos[index0].stage.module)->actualObject;
+ *(pShaderModule) =
+ (VkShaderModule)(
+ (VkUniqueObject *)pCreateInfos[index0].stage.module)
+ ->actualObject;
}
if (pCreateInfos[index0].layout) {
- VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
+ VkPipelineLayout *pPipelineLayout =
+ (VkPipelineLayout *)&(pCreateInfos[index0].layout);
original_layout.push_back(pCreateInfos[index0].layout);
- *(pPipelineLayout) = (VkPipelineLayout)((VkUniqueObject*)pCreateInfos[index0].layout)->actualObject;
+ *(pPipelineLayout) =
+ (VkPipelineLayout)(
+ (VkUniqueObject *)pCreateInfos[index0].layout)
+ ->actualObject;
}
if (pCreateInfos[index0].basePipelineHandle) {
- VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
- original_basePipelineHandle.push_back(pCreateInfos[index0].basePipelineHandle);
- *(pPipeline) = (VkPipeline)((VkUniqueObject*)pCreateInfos[index0].basePipelineHandle)->actualObject;
+ VkPipeline *pPipeline =
+ (VkPipeline *)&(pCreateInfos[index0].basePipelineHandle);
+ original_basePipelineHandle.push_back(
+ pCreateInfos[index0].basePipelineHandle);
+ *(pPipeline) =
+ (VkPipeline)((VkUniqueObject *)pCreateInfos[index0]
+ .basePipelineHandle)->actualObject;
}
}
}
- VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->CreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+ VkResult result =
+ get_dispatch_table(unique_objects_device_table_map, device)
+ ->CreateComputePipelines(device, pipelineCache, createInfoCount,
+ pCreateInfos, pAllocator, pPipelines);
if (pCreateInfos) {
- for (uint32_t index0=0; index0<createInfoCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
if (pCreateInfos[index0].stage.module) {
- VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].stage.module);
+ VkShaderModule *pShaderModule =
+ (VkShaderModule *)&(pCreateInfos[index0].stage.module);
*(pShaderModule) = original_module[index0];
}
if (pCreateInfos[index0].layout) {
- VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
+ VkPipelineLayout *pPipelineLayout =
+ (VkPipelineLayout *)&(pCreateInfos[index0].layout);
*(pPipelineLayout) = original_layout[index0];
}
if (pCreateInfos[index0].basePipelineHandle) {
- VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
+ VkPipeline *pPipeline =
+ (VkPipeline *)&(pCreateInfos[index0].basePipelineHandle);
*(pPipeline) = original_basePipelineHandle[index0];
}
}
}
if (VK_SUCCESS == result) {
- VkUniqueObject* pUO = NULL;
- for (uint32_t i=0; i<createInfoCount; ++i) {
+ VkUniqueObject *pUO = NULL;
+ for (uint32_t i = 0; i < createInfoCount; ++i) {
pUO = new VkUniqueObject();
pUO->actualObject = (uint64_t)pPipelines[i];
pPipelines[i] = (VkPipeline)pUO;
@@ -483,73 +718,107 @@
return result;
}
-VkResult explicit_CreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
-{
-// UNWRAP USES:
-// 0 : pipelineCache,VkPipelineCache, pCreateInfos[createInfoCount]->pStages[stageCount]->module,VkShaderModule, pCreateInfos[createInfoCount]->layout,VkPipelineLayout, pCreateInfos[createInfoCount]->renderPass,VkRenderPass, pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
+VkResult explicit_CreateGraphicsPipelines(
+ VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount,
+ const VkGraphicsPipelineCreateInfo *pCreateInfos,
+ const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines) {
+ // UNWRAP USES:
+ // 0 : pipelineCache,VkPipelineCache,
+ // pCreateInfos[createInfoCount]->pStages[stageCount]->module,VkShaderModule,
+ // pCreateInfos[createInfoCount]->layout,VkPipelineLayout,
+ // pCreateInfos[createInfoCount]->renderPass,VkRenderPass,
+ // pCreateInfos[createInfoCount]->basePipelineHandle,VkPipeline
if (VK_NULL_HANDLE != pipelineCache) {
- pipelineCache = (VkPipelineCache)((VkUniqueObject*)pipelineCache)->actualObject;
+ pipelineCache =
+ (VkPipelineCache)((VkUniqueObject *)pipelineCache)->actualObject;
}
std::vector<VkShaderModule> original_module = {};
std::vector<VkPipelineLayout> original_layout = {};
std::vector<VkRenderPass> original_renderPass = {};
std::vector<VkPipeline> original_basePipelineHandle = {};
if (pCreateInfos) {
- for (uint32_t index0=0; index0<createInfoCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
if (pCreateInfos[index0].pStages) {
- for (uint32_t index1=0; index1<pCreateInfos[index0].stageCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pCreateInfos[index0].stageCount; ++index1) {
if (pCreateInfos[index0].pStages[index1].module) {
- VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].pStages[index1].module);
- original_module.push_back(pCreateInfos[index0].pStages[index1].module);
- *(pShaderModule) = (VkShaderModule)((VkUniqueObject*)pCreateInfos[index0].pStages[index1].module)->actualObject;
+ VkShaderModule *pShaderModule = (VkShaderModule *)&(
+ pCreateInfos[index0].pStages[index1].module);
+ original_module.push_back(
+ pCreateInfos[index0].pStages[index1].module);
+ *(pShaderModule) =
+ (VkShaderModule)(
+ (VkUniqueObject *)pCreateInfos[index0]
+ .pStages[index1]
+ .module)->actualObject;
}
}
}
if (pCreateInfos[index0].layout) {
- VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
+ VkPipelineLayout *pPipelineLayout =
+ (VkPipelineLayout *)&(pCreateInfos[index0].layout);
original_layout.push_back(pCreateInfos[index0].layout);
- *(pPipelineLayout) = (VkPipelineLayout)((VkUniqueObject*)pCreateInfos[index0].layout)->actualObject;
+ *(pPipelineLayout) =
+ (VkPipelineLayout)(
+ (VkUniqueObject *)pCreateInfos[index0].layout)
+ ->actualObject;
}
if (pCreateInfos[index0].renderPass) {
- VkRenderPass* pRenderPass = (VkRenderPass*)&(pCreateInfos[index0].renderPass);
+ VkRenderPass *pRenderPass =
+ (VkRenderPass *)&(pCreateInfos[index0].renderPass);
original_renderPass.push_back(pCreateInfos[index0].renderPass);
- *(pRenderPass) = (VkRenderPass)((VkUniqueObject*)pCreateInfos[index0].renderPass)->actualObject;
+ *(pRenderPass) =
+ (VkRenderPass)(
+ (VkUniqueObject *)pCreateInfos[index0].renderPass)
+ ->actualObject;
}
if (pCreateInfos[index0].basePipelineHandle) {
- VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
- original_basePipelineHandle.push_back(pCreateInfos[index0].basePipelineHandle);
- *(pPipeline) = (VkPipeline)((VkUniqueObject*)pCreateInfos[index0].basePipelineHandle)->actualObject;
+ VkPipeline *pPipeline =
+ (VkPipeline *)&(pCreateInfos[index0].basePipelineHandle);
+ original_basePipelineHandle.push_back(
+ pCreateInfos[index0].basePipelineHandle);
+ *(pPipeline) =
+ (VkPipeline)((VkUniqueObject *)pCreateInfos[index0]
+ .basePipelineHandle)->actualObject;
}
}
}
- VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->CreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+ VkResult result =
+ get_dispatch_table(unique_objects_device_table_map, device)
+ ->CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
+ pCreateInfos, pAllocator, pPipelines);
if (pCreateInfos) {
- for (uint32_t index0=0; index0<createInfoCount; ++index0) {
+ for (uint32_t index0 = 0; index0 < createInfoCount; ++index0) {
if (pCreateInfos[index0].pStages) {
- for (uint32_t index1=0; index1<pCreateInfos[index0].stageCount; ++index1) {
+ for (uint32_t index1 = 0;
+ index1 < pCreateInfos[index0].stageCount; ++index1) {
if (pCreateInfos[index0].pStages[index1].module) {
- VkShaderModule* pShaderModule = (VkShaderModule*)&(pCreateInfos[index0].pStages[index1].module);
+ VkShaderModule *pShaderModule = (VkShaderModule *)&(
+ pCreateInfos[index0].pStages[index1].module);
*(pShaderModule) = original_module[index1];
}
}
}
if (pCreateInfos[index0].layout) {
- VkPipelineLayout* pPipelineLayout = (VkPipelineLayout*)&(pCreateInfos[index0].layout);
+ VkPipelineLayout *pPipelineLayout =
+ (VkPipelineLayout *)&(pCreateInfos[index0].layout);
*(pPipelineLayout) = original_layout[index0];
}
if (pCreateInfos[index0].renderPass) {
- VkRenderPass* pRenderPass = (VkRenderPass*)&(pCreateInfos[index0].renderPass);
+ VkRenderPass *pRenderPass =
+ (VkRenderPass *)&(pCreateInfos[index0].renderPass);
*(pRenderPass) = original_renderPass[index0];
}
if (pCreateInfos[index0].basePipelineHandle) {
- VkPipeline* pPipeline = (VkPipeline*)&(pCreateInfos[index0].basePipelineHandle);
+ VkPipeline *pPipeline =
+ (VkPipeline *)&(pCreateInfos[index0].basePipelineHandle);
*(pPipeline) = original_basePipelineHandle[index0];
}
}
}
if (VK_SUCCESS == result) {
- VkUniqueObject* pUO = NULL;
- for (uint32_t i=0; i<createInfoCount; ++i) {
+ VkUniqueObject *pUO = NULL;
+ for (uint32_t i = 0; i < createInfoCount; ++i) {
pUO = new VkUniqueObject();
pUO->actualObject = (uint64_t)pPipelines[i];
pPipelines[i] = (VkPipeline)pUO;
@@ -558,19 +827,24 @@
return result;
}
-VkResult explicit_GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages)
-{
-// UNWRAP USES:
-// 0 : swapchain,VkSwapchainKHR, pSwapchainImages,VkImage
+VkResult explicit_GetSwapchainImagesKHR(VkDevice device,
+ VkSwapchainKHR swapchain,
+ uint32_t *pSwapchainImageCount,
+ VkImage *pSwapchainImages) {
+ // UNWRAP USES:
+ // 0 : swapchain,VkSwapchainKHR, pSwapchainImages,VkImage
if (VK_NULL_HANDLE != swapchain) {
- swapchain = (VkSwapchainKHR)((VkUniqueObject*)swapchain)->actualObject;
+ swapchain = (VkSwapchainKHR)((VkUniqueObject *)swapchain)->actualObject;
}
- VkResult result = get_dispatch_table(unique_objects_device_table_map, device)->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages);
+ VkResult result =
+ get_dispatch_table(unique_objects_device_table_map, device)
+ ->GetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount,
+ pSwapchainImages);
// TODO : Need to add corresponding code to delete these images
if (VK_SUCCESS == result) {
if ((*pSwapchainImageCount > 0) && pSwapchainImages) {
- std::vector<VkUniqueObject*> uniqueImages = {};
- for (uint32_t i=0; i<*pSwapchainImageCount; ++i) {
+ std::vector<VkUniqueObject *> uniqueImages = {};
+ for (uint32_t i = 0; i < *pSwapchainImageCount; ++i) {
uniqueImages.push_back(new VkUniqueObject());
uniqueImages[i]->actualObject = (uint64_t)pSwapchainImages[i];
pSwapchainImages[i] = (VkImage)uniqueImages[i];
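Every explicit_* function in the unique_objects.h hunks above performs the same round trip: handles arriving from the application are unwrapped to ->actualObject before the call is dispatched down the chain (and restored afterwards), while handles returned by the driver are wrapped in a newly allocated VkUniqueObject whose address becomes the unique handle. A standalone sketch of that wrap/unwrap step, with illustrative Fake* names in place of the real Vulkan types:

    #include <cstdint>

    struct UniqueWrapper {              // plays the role of VkUniqueObject
        std::uint64_t actualObject;     // the real handle issued by the driver
    };

    using FakeHandle = std::uint64_t;

    FakeHandle wrap(FakeHandle real) {
        // The address of the heap-allocated wrapper becomes the handle the
        // application sees, so two equal driver handles still look distinct.
        return reinterpret_cast<FakeHandle>(new UniqueWrapper{real});
    }

    FakeHandle unwrap(FakeHandle wrapped) {
        // Recover the driver handle before dispatching down the chain.
        return reinterpret_cast<UniqueWrapper *>(wrapped)->actualObject;
    }

As the TODO in the GetSwapchainImagesKHR hunk notes, the wrappers also need to be deleted when the corresponding objects are destroyed; the sketch omits that cleanup.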
diff --git a/layers/vk_layer_config.cpp b/layers/vk_layer_config.cpp
old mode 100755
new mode 100644
index 1d2cfc7..58f82f1
--- a/layers/vk_layer_config.cpp
+++ b/layers/vk_layer_config.cpp
@@ -37,16 +37,15 @@
#define MAX_CHARS_PER_LINE 4096
-class ConfigFile
-{
-public:
+class ConfigFile {
+ public:
ConfigFile();
~ConfigFile();
const char *getOption(const std::string &_option);
void setOption(const std::string &_option, const std::string &_val);
-private:
+ private:
bool m_fileIsParsed;
std::map<std::string, std::string> m_valueMap;
@@ -55,8 +54,7 @@
static ConfigFile g_configFileObj;
-static VkLayerDbgAction stringToDbgAction(const char *_enum)
-{
+static VkLayerDbgAction stringToDbgAction(const char *_enum) {
// only handles single enum values
if (!strcmp(_enum, "VK_DBG_LAYER_ACTION_IGNORE"))
return VK_DBG_LAYER_ACTION_IGNORE;
@@ -68,11 +66,10 @@
#endif
else if (!strcmp(_enum, "VK_DBG_LAYER_ACTION_BREAK"))
return VK_DBG_LAYER_ACTION_BREAK;
- return (VkLayerDbgAction) 0;
+ return (VkLayerDbgAction)0;
}
-static VkFlags stringToDbgReportFlags(const char *_enum)
-{
+static VkFlags stringToDbgReportFlags(const char *_enum) {
// only handles single enum values
if (!strcmp(_enum, "VK_DEBUG_REPORT_INFO"))
return VK_DEBUG_REPORT_INFO_BIT_EXT;
@@ -84,11 +81,10 @@
return VK_DEBUG_REPORT_ERROR_BIT_EXT;
else if (!strcmp(_enum, "VK_DEBUG_REPORT_DEBUG"))
return VK_DEBUG_REPORT_DEBUG_BIT_EXT;
- return (VkFlags) 0;
+ return (VkFlags)0;
}
-static unsigned int convertStringEnumVal(const char *_enum)
-{
+static unsigned int convertStringEnumVal(const char *_enum) {
unsigned int ret;
ret = stringToDbgAction(_enum);
@@ -98,31 +94,32 @@
return stringToDbgReportFlags(_enum);
}
-const char *getLayerOption(const char *_option)
-{
+const char *getLayerOption(const char *_option) {
return g_configFileObj.getOption(_option);
}
// If option is NULL or stdout, return stdout, otherwise try to open option
// as a filename. If successful, return file handle, otherwise stdout
-FILE* getLayerLogOutput(const char *_option, const char *layerName)
-{
- FILE* log_output = NULL;
+FILE *getLayerLogOutput(const char *_option, const char *layerName) {
+ FILE *log_output = NULL;
if (!_option || !strcmp("stdout", _option))
log_output = stdout;
else {
log_output = fopen(_option, "w");
if (log_output == NULL) {
if (_option)
- std::cout << std::endl << layerName << " ERROR: Bad output filename specified: " << _option << ". Writing to STDOUT instead" << std::endl << std::endl;
+ std::cout << std::endl << layerName
+ << " ERROR: Bad output filename specified: "
+ << _option << ". Writing to STDOUT instead"
+ << std::endl << std::endl;
log_output = stdout;
}
}
return log_output;
}
-VkDebugReportFlagsEXT getLayerOptionFlags(const char *_option, uint32_t optionDefault)
-{
+VkDebugReportFlagsEXT getLayerOptionFlags(const char *_option,
+ uint32_t optionDefault) {
VkDebugReportFlagsEXT flags = optionDefault;
const char *option = (g_configFileObj.getOption(_option));
@@ -158,8 +155,7 @@
return flags;
}
-bool getLayerOptionEnum(const char *_option, uint32_t *optionDefault)
-{
+bool getLayerOptionEnum(const char *_option, uint32_t *optionDefault) {
bool res;
const char *option = (g_configFileObj.getOption(_option));
if (option != NULL) {
@@ -171,32 +167,24 @@
return res;
}
-void setLayerOptionEnum(const char *_option, const char *_valEnum)
-{
+void setLayerOptionEnum(const char *_option, const char *_valEnum) {
unsigned int val = convertStringEnumVal(_valEnum);
char strVal[24];
snprintf(strVal, 24, "%u", val);
g_configFileObj.setOption(_option, strVal);
}
-void setLayerOption(const char *_option, const char *_val)
-{
+void setLayerOption(const char *_option, const char *_val) {
g_configFileObj.setOption(_option, _val);
}
-ConfigFile::ConfigFile() : m_fileIsParsed(false)
-{
-}
+ConfigFile::ConfigFile() : m_fileIsParsed(false) {}
-ConfigFile::~ConfigFile()
-{
-}
+ConfigFile::~ConfigFile() {}
-const char *ConfigFile::getOption(const std::string &_option)
-{
+const char *ConfigFile::getOption(const std::string &_option) {
std::map<std::string, std::string>::const_iterator it;
- if (!m_fileIsParsed)
- {
+ if (!m_fileIsParsed) {
parseFile("vk_layer_settings.txt");
}
@@ -206,18 +194,16 @@
return it->second.c_str();
}
-void ConfigFile::setOption(const std::string &_option, const std::string &_val)
-{
- if (!m_fileIsParsed)
- {
+void ConfigFile::setOption(const std::string &_option,
+ const std::string &_val) {
+ if (!m_fileIsParsed) {
parseFile("vk_layer_settings.txt");
}
m_valueMap[_option] = _val;
}
-void ConfigFile::parseFile(const char *filename)
-{
+void ConfigFile::parseFile(const char *filename) {
std::ifstream file;
char buf[MAX_CHARS_PER_LINE];
@@ -230,20 +216,18 @@
// read tokens from the file and form option, value pairs
file.getline(buf, MAX_CHARS_PER_LINE);
- while (!file.eof())
- {
+ while (!file.eof()) {
char option[512];
char value[512];
char *pComment;
- //discard any comments delimited by '#' in the line
+ // discard any comments delimited by '#' in the line
pComment = strchr(buf, '#');
if (pComment)
*pComment = '\0';
- if (sscanf(buf, " %511[^\n\t =] = %511[^\n \t]", option, value) == 2)
- {
+ if (sscanf(buf, " %511[^\n\t =] = %511[^\n \t]", option, value) == 2) {
std::string optStr(option);
std::string valStr(value);
m_valueMap[optStr] = valStr;
@@ -252,8 +236,7 @@
}
}
-void print_msg_flags(VkFlags msgFlags, char *msg_flags)
-{
+void print_msg_flags(VkFlags msgFlags, char *msg_flags) {
bool separator = false;
msg_flags[0] = 0;
@@ -262,23 +245,26 @@
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_INFO_BIT_EXT) {
- if (separator) strcat(msg_flags, ",");
+ if (separator)
+ strcat(msg_flags, ",");
strcat(msg_flags, "INFO");
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_WARN_BIT_EXT) {
- if (separator) strcat(msg_flags, ",");
+ if (separator)
+ strcat(msg_flags, ",");
strcat(msg_flags, "WARN");
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_PERF_WARN_BIT_EXT) {
- if (separator) strcat(msg_flags, ",");
+ if (separator)
+ strcat(msg_flags, ",");
strcat(msg_flags, "PERF");
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
- if (separator) strcat(msg_flags, ",");
+ if (separator)
+ strcat(msg_flags, ",");
strcat(msg_flags, "ERROR");
}
}
-
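ConfigFile::parseFile above reads vk_layer_settings.txt line by line, strips anything after '#', and accepts "option = value" pairs via sscanf (values may not contain spaces). For illustration only, a file in that format might look like the following; the option names are hypothetical, while the value strings come from the enum and "stdout" handling shown in this file:

    # hypothetical vk_layer_settings.txt entries; '#' starts a comment
    ExampleLayerDebugAction = VK_DBG_LAYER_ACTION_BREAK
    ExampleLayerLogFilename = stdout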
diff --git a/layers/vk_layer_config.h b/layers/vk_layer_config.h
index a22841c..867b994 100644
--- a/layers/vk_layer_config.h
+++ b/layers/vk_layer_config.h
@@ -35,8 +35,9 @@
#endif
const char *getLayerOption(const char *_option);
-FILE* getLayerLogOutput(const char *_option, const char *layerName);
-VkDebugReportFlagsEXT getLayerOptionFlags(const char *_option, uint32_t optionDefault);
+FILE *getLayerLogOutput(const char *_option, const char *layerName);
+VkDebugReportFlagsEXT getLayerOptionFlags(const char *_option,
+ uint32_t optionDefault);
bool getLayerOptionEnum(const char *_option, uint32_t *optionDefault);
void setLayerOption(const char *_option, const char *_val);
diff --git a/layers/vk_layer_debug_marker_table.cpp b/layers/vk_layer_debug_marker_table.cpp
index 26e89b1..b19e537 100644
--- a/layers/vk_layer_debug_marker_table.cpp
+++ b/layers/vk_layer_debug_marker_table.cpp
@@ -31,36 +31,46 @@
#include <assert.h>
#include <unordered_map>
#include "vulkan/vk_debug_marker_layer.h"
-std::unordered_map<void *, VkLayerDebugMarkerDispatchTable *> tableDebugMarkerMap;
+std::unordered_map<void *, VkLayerDebugMarkerDispatchTable *>
+ tableDebugMarkerMap;
-/* Various dispatchable objects will use the same underlying dispatch table if they
+/* Various dispatchable objects will use the same underlying dispatch table if
+ * they
* are created from that "parent" object. Thus use pointer to dispatch table
* as the key to these table maps.
* Instance -> PhysicalDevice
* Device -> CommandBuffer or Queue
- * If use the object themselves as key to map then implies Create entrypoints have to be intercepted
+ * If use the object themselves as key to map then implies Create entrypoints
+ * have to be intercepted
* and a new key inserted into map */
-VkLayerDebugMarkerDispatchTable * initDebugMarkerTable(VkDevice device)
-{
+VkLayerDebugMarkerDispatchTable *initDebugMarkerTable(VkDevice device) {
VkLayerDebugMarkerDispatchTable *pDebugMarkerTable;
assert(device);
- VkLayerDispatchTable *pDisp = *(VkLayerDispatchTable **) device;
+ VkLayerDispatchTable *pDisp = *(VkLayerDispatchTable **)device;
- std::unordered_map<void *, VkLayerDebugMarkerDispatchTable *>::const_iterator it = tableDebugMarkerMap.find((void *) pDisp);
- if (it == tableDebugMarkerMap.end())
- {
+ std::unordered_map<void *,
+ VkLayerDebugMarkerDispatchTable *>::const_iterator it =
+ tableDebugMarkerMap.find((void *)pDisp);
+ if (it == tableDebugMarkerMap.end()) {
pDebugMarkerTable = new VkLayerDebugMarkerDispatchTable;
- tableDebugMarkerMap[(void *) pDisp] = pDebugMarkerTable;
- } else
- {
+ tableDebugMarkerMap[(void *)pDisp] = pDebugMarkerTable;
+ } else {
return it->second;
}
- pDebugMarkerTable->CmdDbgMarkerBegin = (PFN_vkCmdDbgMarkerBegin) pDisp->GetDeviceProcAddr(device, "vkCmdDbgMarkerBegin");
- pDebugMarkerTable->CmdDbgMarkerEnd = (PFN_vkCmdDbgMarkerEnd) pDisp->GetDeviceProcAddr(device, "vkCmdDbgMarkerEnd");
- pDebugMarkerTable->DbgSetObjectTag = (PFN_vkDbgSetObjectTag) pDisp->GetDeviceProcAddr(device, "vkDbgSetObjectTag");
- pDebugMarkerTable->DbgSetObjectName = (PFN_vkDbgSetObjectName) pDisp->GetDeviceProcAddr(device, "vkDbgSetObjectName");
+ pDebugMarkerTable->CmdDbgMarkerBegin =
+ (PFN_vkCmdDbgMarkerBegin)pDisp->GetDeviceProcAddr(
+ device, "vkCmdDbgMarkerBegin");
+ pDebugMarkerTable->CmdDbgMarkerEnd =
+ (PFN_vkCmdDbgMarkerEnd)pDisp->GetDeviceProcAddr(device,
+ "vkCmdDbgMarkerEnd");
+ pDebugMarkerTable->DbgSetObjectTag =
+ (PFN_vkDbgSetObjectTag)pDisp->GetDeviceProcAddr(device,
+ "vkDbgSetObjectTag");
+ pDebugMarkerTable->DbgSetObjectName =
+ (PFN_vkDbgSetObjectName)pDisp->GetDeviceProcAddr(device,
+ "vkDbgSetObjectName");
return pDebugMarkerTable;
}
diff --git a/layers/vk_layer_debug_marker_table.h b/layers/vk_layer_debug_marker_table.h
index 8684727..f128c5e 100644
--- a/layers/vk_layer_debug_marker_table.h
+++ b/layers/vk_layer_debug_marker_table.h
@@ -35,15 +35,19 @@
#include <cassert>
#include <unordered_map>
-extern std::unordered_map<void *, VkLayerDebugMarkerDispatchTable *> tableDebugMarkerMap;
-VkLayerDebugMarkerDispatchTable * initDebugMarkerTable(VkDevice dev);
+extern std::unordered_map<void *, VkLayerDebugMarkerDispatchTable *>
+ tableDebugMarkerMap;
+VkLayerDebugMarkerDispatchTable *initDebugMarkerTable(VkDevice dev);
// Map lookup must be thread safe
-static inline VkLayerDebugMarkerDispatchTable *debug_marker_dispatch_table(void* object)
-{
- VkLayerDebugMarkerDispatchTable *pDisp = *(VkLayerDebugMarkerDispatchTable **) object;
- std::unordered_map<void *, VkLayerDebugMarkerDispatchTable *>::const_iterator it = tableDebugMarkerMap.find((void *) pDisp);
- assert(it != tableDebugMarkerMap.end() && "Not able to find debug marker dispatch entry");
+static inline VkLayerDebugMarkerDispatchTable *
+debug_marker_dispatch_table(void *object) {
+ VkLayerDebugMarkerDispatchTable *pDisp =
+ *(VkLayerDebugMarkerDispatchTable **)object;
+ std::unordered_map<void *,
+ VkLayerDebugMarkerDispatchTable *>::const_iterator it =
+ tableDebugMarkerMap.find((void *)pDisp);
+ assert(it != tableDebugMarkerMap.end() &&
+ "Not able to find debug marker dispatch entry");
return it->second;
}
-
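
Note: initDebugMarkerTable above keys its map on the device's dispatch-table pointer and allocates a table only on the first lookup. A small self-contained sketch of that get-or-create caching follows, with DummyTable standing in for the real dispatch-table struct; it is illustrative only and not part of the patch.

#include <iostream>
#include <unordered_map>

struct DummyTable { int filled = 0; }; // stand-in for VkLayerDebugMarkerDispatchTable

static std::unordered_map<void *, DummyTable *> tableMap;

DummyTable *getOrCreateTable(void *key) {
    auto it = tableMap.find(key);
    if (it != tableMap.end())
        return it->second;      // already initialized for this parent object
    DummyTable *pTable = new DummyTable; // owned by the map for the process lifetime
    tableMap[key] = pTable;     // first use of this key: allocate and cache
    return pTable;
}

int main() {
    int parent = 0;             // stands in for a dispatchable object's key
    DummyTable *a = getOrCreateTable(&parent);
    DummyTable *b = getOrCreateTable(&parent);
    std::cout << std::boolalpha << (a == b) << "\n"; // true: same cached table
    return 0;
}
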
diff --git a/layers/vk_layer_extension_utils.cpp b/layers/vk_layer_extension_utils.cpp
index 748aa99..9a7d76b 100644
--- a/layers/vk_layer_extension_utils.cpp
+++ b/layers/vk_layer_extension_utils.cpp
@@ -39,11 +39,8 @@
*/
VkResult util_GetExtensionProperties(
- const uint32_t count,
- const VkExtensionProperties *layer_extensions,
- uint32_t* pCount,
- VkExtensionProperties* pProperties)
-{
+ const uint32_t count, const VkExtensionProperties *layer_extensions,
+ uint32_t *pCount, VkExtensionProperties *pProperties) {
uint32_t copy_size;
if (pProperties == NULL || layer_extensions == NULL) {
@@ -52,7 +49,8 @@
}
copy_size = *pCount < count ? *pCount : count;
- memcpy(pProperties, layer_extensions, copy_size * sizeof(VkExtensionProperties));
+ memcpy(pProperties, layer_extensions,
+ copy_size * sizeof(VkExtensionProperties));
*pCount = copy_size;
if (copy_size < count) {
return VK_INCOMPLETE;
@@ -61,12 +59,10 @@
return VK_SUCCESS;
}
-VkResult util_GetLayerProperties(
- const uint32_t count,
- const VkLayerProperties *layer_properties,
- uint32_t* pCount,
- VkLayerProperties* pProperties)
-{
+VkResult util_GetLayerProperties(const uint32_t count,
+ const VkLayerProperties *layer_properties,
+ uint32_t *pCount,
+ VkLayerProperties *pProperties) {
uint32_t copy_size;
if (pProperties == NULL || layer_properties == NULL) {
@@ -75,7 +71,8 @@
}
copy_size = *pCount < count ? *pCount : count;
- memcpy(pProperties, layer_properties, copy_size * sizeof(VkLayerProperties));
+ memcpy(pProperties, layer_properties,
+ copy_size * sizeof(VkLayerProperties));
*pCount = copy_size;
if (copy_size < count) {
return VK_INCOMPLETE;
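
Note: util_GetExtensionProperties and util_GetLayerProperties above both clamp the copy to the caller-provided *pCount and report VK_INCOMPLETE when the output array was too small. A rough standalone sketch of that clamp-and-copy contract follows, using made-up Item/Result stand-ins rather than the Vulkan types; the NULL-output branch here assumes the usual query-the-count idiom, which the elided hunk does not show in full.

#include <cstdint>
#include <cstring>
#include <iostream>

struct Item { char name[64]; };                       // stand-in for VkExtensionProperties
enum Result { RESULT_SUCCESS = 0, RESULT_INCOMPLETE = 1 }; // stand-in for VkResult values

Result copyProperties(uint32_t count, const Item *source,
                      uint32_t *pCount, Item *pProperties) {
    if (pProperties == nullptr) { // query-only call (assumed): report the total count
        *pCount = count;
        return RESULT_SUCCESS;
    }
    uint32_t copy_size = *pCount < count ? *pCount : count;
    memcpy(pProperties, source, copy_size * sizeof(Item));
    *pCount = copy_size;
    return (copy_size < count) ? RESULT_INCOMPLETE : RESULT_SUCCESS;
}

int main() {
    Item layer_items[3] = {{"a"}, {"b"}, {"c"}};
    Item out[2];
    uint32_t n = 2;
    Result r = copyProperties(3, layer_items, &n, out);
    std::cout << "copied " << n << ", truncated: " << (r == RESULT_INCOMPLETE) << "\n";
    return 0;
}
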
diff --git a/layers/vk_layer_extension_utils.h b/layers/vk_layer_extension_utils.h
index ba70d68..716a59b 100644
--- a/layers/vk_layer_extension_utils.h
+++ b/layers/vk_layer_extension_utils.h
@@ -42,17 +42,13 @@
extern "C" {
VkResult util_GetExtensionProperties(
- const uint32_t count,
- const VkExtensionProperties *layer_extensions,
- uint32_t* pCount,
- VkExtensionProperties* pProperties);
+ const uint32_t count, const VkExtensionProperties *layer_extensions,
+ uint32_t *pCount, VkExtensionProperties *pProperties);
-VkResult util_GetLayerProperties(
- const uint32_t count,
- const VkLayerProperties *layer_properties,
- uint32_t* pCount,
- VkLayerProperties* pProperties);
+VkResult util_GetLayerProperties(const uint32_t count,
+ const VkLayerProperties *layer_properties,
+ uint32_t *pCount,
+ VkLayerProperties *pProperties);
} // extern "C"
#endif // LAYER_EXTENSION_UTILS_H
-
diff --git a/layers/vk_layer_table.cpp b/layers/vk_layer_table.cpp
index 3cee6dd..57f2989 100644
--- a/layers/vk_layer_table.cpp
+++ b/layers/vk_layer_table.cpp
@@ -38,144 +38,170 @@
#define DISPATCH_MAP_DEBUG 0
// Map lookup must be thread safe
-VkLayerDispatchTable *device_dispatch_table(void* object)
-{
+VkLayerDispatchTable *device_dispatch_table(void *object) {
dispatch_key key = get_dispatch_key(object);
- device_table_map::const_iterator it = tableMap.find((void *) key);
+ device_table_map::const_iterator it = tableMap.find((void *)key);
assert(it != tableMap.end() && "Not able to find device dispatch entry");
return it->second;
}
-VkLayerInstanceDispatchTable *instance_dispatch_table(void* object)
-{
+VkLayerInstanceDispatchTable *instance_dispatch_table(void *object) {
dispatch_key key = get_dispatch_key(object);
- instance_table_map::const_iterator it = tableInstanceMap.find((void *) key);
+ instance_table_map::const_iterator it = tableInstanceMap.find((void *)key);
#if DISPATCH_MAP_DEBUG
if (it != tableInstanceMap.end()) {
- fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: %p, table: %p\n", &tableInstanceMap, object, key, it->second);
+ fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: "
+ "%p, table: %p\n",
+ &tableInstanceMap, object, key, it->second);
} else {
- fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: %p, table: UNKNOWN\n", &tableInstanceMap, object, key);
+ fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: "
+ "%p, table: UNKNOWN\n",
+ &tableInstanceMap, object, key);
}
#endif
- assert(it != tableInstanceMap.end() && "Not able to find instance dispatch entry");
+ assert(it != tableInstanceMap.end() &&
+ "Not able to find instance dispatch entry");
return it->second;
}
-void destroy_dispatch_table(device_table_map &map, dispatch_key key)
-{
- device_table_map::const_iterator it = map.find((void *) key);
+void destroy_dispatch_table(device_table_map &map, dispatch_key key) {
+ device_table_map::const_iterator it = map.find((void *)key);
#if DISPATCH_MAP_DEBUG
if (it != map.end()) {
- fprintf(stderr, "destroy device dispatch_table: map: %p, key: %p, table: %p\n", &map, key, it->second);
+ fprintf(stderr,
+ "destroy device dispatch_table: map: %p, key: %p, table: %p\n",
+ &map, key, it->second);
} else {
- fprintf(stderr, "destroy device dispatch table: map: %p, key: %p, table: UNKNOWN\n", &map, key);
+ fprintf(
+ stderr,
+ "destroy device dispatch table: map: %p, key: %p, table: UNKNOWN\n",
+ &map, key);
assert(it != map.end());
}
#endif
map.erase(key);
}
-void destroy_dispatch_table(instance_table_map &map, dispatch_key key)
-{
- instance_table_map::const_iterator it = map.find((void *) key);
+void destroy_dispatch_table(instance_table_map &map, dispatch_key key) {
+ instance_table_map::const_iterator it = map.find((void *)key);
#if DISPATCH_MAP_DEBUG
if (it != map.end()) {
- fprintf(stderr, "destroy instance dispatch_table: map: %p, key: %p, table: %p\n", &map, key, it->second);
+ fprintf(
+ stderr,
+ "destroy instance dispatch_table: map: %p, key: %p, table: %p\n",
+ &map, key, it->second);
} else {
- fprintf(stderr, "destroy instance dispatch table: map: %p, key: %p, table: UNKNOWN\n", &map, key);
+ fprintf(stderr, "destroy instance dispatch table: map: %p, key: %p, "
+ "table: UNKNOWN\n",
+ &map, key);
assert(it != map.end());
}
#endif
map.erase(key);
}
-void destroy_device_dispatch_table(dispatch_key key)
-{
+void destroy_device_dispatch_table(dispatch_key key) {
destroy_dispatch_table(tableMap, key);
}
-void destroy_instance_dispatch_table(dispatch_key key)
-{
+void destroy_instance_dispatch_table(dispatch_key key) {
destroy_dispatch_table(tableInstanceMap, key);
}
-VkLayerDispatchTable *get_dispatch_table(device_table_map &map, void* object)
-{
+VkLayerDispatchTable *get_dispatch_table(device_table_map &map, void *object) {
dispatch_key key = get_dispatch_key(object);
- device_table_map::const_iterator it = map.find((void *) key);
+ device_table_map::const_iterator it = map.find((void *)key);
#if DISPATCH_MAP_DEBUG
if (it != map.end()) {
- fprintf(stderr, "device_dispatch_table: map: %p, object: %p, key: %p, table: %p\n", &tableInstanceMap, object, key, it->second);
+ fprintf(
+ stderr,
+ "device_dispatch_table: map: %p, object: %p, key: %p, table: %p\n",
+ &tableInstanceMap, object, key, it->second);
} else {
- fprintf(stderr, "device_dispatch_table: map: %p, object: %p, key: %p, table: UNKNOWN\n", &tableInstanceMap, object, key);
+ fprintf(stderr, "device_dispatch_table: map: %p, object: %p, key: %p, "
+ "table: UNKNOWN\n",
+ &tableInstanceMap, object, key);
}
#endif
assert(it != map.end() && "Not able to find device dispatch entry");
return it->second;
}
-VkLayerInstanceDispatchTable *get_dispatch_table(instance_table_map &map, void* object)
-{
-// VkLayerInstanceDispatchTable *pDisp = *(VkLayerInstanceDispatchTable **) object;
+VkLayerInstanceDispatchTable *get_dispatch_table(instance_table_map &map,
+ void *object) {
+ // VkLayerInstanceDispatchTable *pDisp = *(VkLayerInstanceDispatchTable
+ // **) object;
dispatch_key key = get_dispatch_key(object);
- instance_table_map::const_iterator it = map.find((void *) key);
+ instance_table_map::const_iterator it = map.find((void *)key);
#if DISPATCH_MAP_DEBUG
if (it != map.end()) {
- fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: %p, table: %p\n", &tableInstanceMap, object, key, it->second);
+ fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: "
+ "%p, table: %p\n",
+ &tableInstanceMap, object, key, it->second);
} else {
- fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: %p, table: UNKNOWN\n", &tableInstanceMap, object, key);
+ fprintf(stderr, "instance_dispatch_table: map: %p, object: %p, key: "
+ "%p, table: UNKNOWN\n",
+ &tableInstanceMap, object, key);
}
#endif
assert(it != map.end() && "Not able to find instance dispatch entry");
return it->second;
}
-VkLayerInstanceCreateInfo *get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func)
-{
- VkLayerInstanceCreateInfo *chain_info = (VkLayerInstanceCreateInfo *) pCreateInfo->pNext;
- while (chain_info && !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
- && chain_info->function == func)) {
- chain_info = (VkLayerInstanceCreateInfo *) chain_info->pNext;
+VkLayerInstanceCreateInfo *
+get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func) {
+ VkLayerInstanceCreateInfo *chain_info =
+ (VkLayerInstanceCreateInfo *)pCreateInfo->pNext;
+ while (
+ chain_info &&
+ !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO &&
+ chain_info->function == func)) {
+ chain_info = (VkLayerInstanceCreateInfo *)chain_info->pNext;
}
assert(chain_info != NULL);
return chain_info;
}
-VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo, VkLayerFunction func)
-{
- VkLayerDeviceCreateInfo *chain_info = (VkLayerDeviceCreateInfo *) pCreateInfo->pNext;
- while (chain_info && !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
- && chain_info->function == func)) {
- chain_info = (VkLayerDeviceCreateInfo *) chain_info->pNext;
+VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo,
+ VkLayerFunction func) {
+ VkLayerDeviceCreateInfo *chain_info =
+ (VkLayerDeviceCreateInfo *)pCreateInfo->pNext;
+ while (chain_info &&
+ !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO &&
+ chain_info->function == func)) {
+ chain_info = (VkLayerDeviceCreateInfo *)chain_info->pNext;
}
assert(chain_info != NULL);
return chain_info;
}
-/* Various dispatchable objects will use the same underlying dispatch table if they
+/* Various dispatchable objects will use the same underlying dispatch table if
+ * they
* are created from that "parent" object. Thus use pointer to dispatch table
* as the key to these table maps.
* Instance -> PhysicalDevice
* Device -> CommandBuffer or Queue
- * If use the object themselves as key to map then implies Create entrypoints have to be intercepted
+ * If use the object themselves as key to map then implies Create entrypoints
+ * have to be intercepted
* and a new key inserted into map */
-VkLayerInstanceDispatchTable * initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa, instance_table_map &map)
-{
+VkLayerInstanceDispatchTable *
+initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa,
+ instance_table_map &map) {
VkLayerInstanceDispatchTable *pTable;
dispatch_key key = get_dispatch_key(instance);
- instance_table_map::const_iterator it = map.find((void *) key);
+ instance_table_map::const_iterator it = map.find((void *)key);
- if (it == map.end())
- {
- pTable = new VkLayerInstanceDispatchTable;
- map[(void *) key] = pTable;
+ if (it == map.end()) {
+ pTable = new VkLayerInstanceDispatchTable;
+ map[(void *)key] = pTable;
#if DISPATCH_MAP_DEBUG
- fprintf(stderr, "New, Instance: map: %p, key: %p, table: %p\n", &map, key, pTable);
+ fprintf(stderr, "New, Instance: map: %p, key: %p, table: %p\n", &map,
+ key, pTable);
#endif
- } else
- {
+ } else {
#if DISPATCH_MAP_DEBUG
- fprintf(stderr, "Instance: map: %p, key: %p, table: %p\n", &map, key, it->second);
+ fprintf(stderr, "Instance: map: %p, key: %p, table: %p\n", &map, key,
+ it->second);
#endif
return it->second;
}
@@ -185,28 +211,29 @@
return pTable;
}
-VkLayerInstanceDispatchTable * initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa)
-{
+VkLayerInstanceDispatchTable *
+initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa) {
return initInstanceTable(instance, gpa, tableInstanceMap);
}
-VkLayerDispatchTable * initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa, device_table_map &map)
-{
+VkLayerDispatchTable *initDeviceTable(VkDevice device,
+ const PFN_vkGetDeviceProcAddr gpa,
+ device_table_map &map) {
VkLayerDispatchTable *pTable;
dispatch_key key = get_dispatch_key(device);
- device_table_map::const_iterator it = map.find((void *) key);
+ device_table_map::const_iterator it = map.find((void *)key);
- if (it == map.end())
- {
- pTable = new VkLayerDispatchTable;
- map[(void *) key] = pTable;
+ if (it == map.end()) {
+ pTable = new VkLayerDispatchTable;
+ map[(void *)key] = pTable;
#if DISPATCH_MAP_DEBUG
- fprintf(stderr, "New, Device: map: %p, key: %p, table: %p\n", &map, key, pTable);
+ fprintf(stderr, "New, Device: map: %p, key: %p, table: %p\n", &map, key,
+ pTable);
#endif
- } else
- {
+ } else {
#if DISPATCH_MAP_DEBUG
- fprintf(stderr, "Device: map: %p, key: %p, table: %p\n", &map, key, it->second);
+ fprintf(stderr, "Device: map: %p, key: %p, table: %p\n", &map, key,
+ it->second);
#endif
return it->second;
}
@@ -216,7 +243,7 @@
return pTable;
}
-VkLayerDispatchTable * initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa)
-{
+VkLayerDispatchTable *initDeviceTable(VkDevice device,
+ const PFN_vkGetDeviceProcAddr gpa) {
return initDeviceTable(device, gpa, tableMap);
}
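
Note: the two get_chain_info overloads above walk the pNext chain until they find the loader's link node matching both the expected sType and the requested VkLayerFunction. A minimal sketch of that walk follows; ChainNode, LOADER_CREATE_INFO and LAYER_LINK_INFO are illustrative stand-ins, not real Vulkan definitions.

#include <cassert>
#include <iostream>

struct ChainNode {
    int sType;
    int function;
    const ChainNode *pNext;
};

const int LOADER_CREATE_INFO = 1; // stand-in for VK_STRUCTURE_TYPE_LOADER_*_CREATE_INFO
const int LAYER_LINK_INFO = 7;    // stand-in for the VkLayerFunction being searched for

const ChainNode *get_chain_info(const ChainNode *head, int func) {
    const ChainNode *chain_info = head;
    while (chain_info && !(chain_info->sType == LOADER_CREATE_INFO &&
                           chain_info->function == func)) {
        chain_info = chain_info->pNext; // keep walking the chain
    }
    assert(chain_info != nullptr);      // the loader is expected to provide the node
    return chain_info;
}

int main() {
    ChainNode wanted = {LOADER_CREATE_INFO, LAYER_LINK_INFO, nullptr};
    ChainNode other = {LOADER_CREATE_INFO, 99, &wanted}; // right sType, wrong function
    ChainNode head = {3, 0, &other};
    std::cout << (get_chain_info(&head, LAYER_LINK_INFO) == &wanted) << "\n"; // 1
    return 0;
}
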
diff --git a/layers/vk_layer_table.h b/layers/vk_layer_table.h
index d51108d..e68680d 100644
--- a/layers/vk_layer_table.h
+++ b/layers/vk_layer_table.h
@@ -34,30 +34,38 @@
#include <unordered_map>
typedef std::unordered_map<void *, VkLayerDispatchTable *> device_table_map;
-typedef std::unordered_map<void *, VkLayerInstanceDispatchTable *> instance_table_map;
-VkLayerDispatchTable * initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa, device_table_map &map);
-VkLayerDispatchTable * initDeviceTable(VkDevice device, const PFN_vkGetDeviceProcAddr gpa);
-VkLayerInstanceDispatchTable * initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa, instance_table_map &map);
-VkLayerInstanceDispatchTable * initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa);
-
+typedef std::unordered_map<void *, VkLayerInstanceDispatchTable *>
+ instance_table_map;
+VkLayerDispatchTable *initDeviceTable(VkDevice device,
+ const PFN_vkGetDeviceProcAddr gpa,
+ device_table_map &map);
+VkLayerDispatchTable *initDeviceTable(VkDevice device,
+ const PFN_vkGetDeviceProcAddr gpa);
+VkLayerInstanceDispatchTable *
+initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa,
+ instance_table_map &map);
+VkLayerInstanceDispatchTable *
+initInstanceTable(VkInstance instance, const PFN_vkGetInstanceProcAddr gpa);
typedef void *dispatch_key;
-static inline dispatch_key get_dispatch_key(const void* object)
-{
- return (dispatch_key) *(VkLayerDispatchTable **) object;
+static inline dispatch_key get_dispatch_key(const void *object) {
+ return (dispatch_key) * (VkLayerDispatchTable **)object;
}
-VkLayerDispatchTable *device_dispatch_table(void* object);
+VkLayerDispatchTable *device_dispatch_table(void *object);
-VkLayerInstanceDispatchTable *instance_dispatch_table(void* object);
+VkLayerInstanceDispatchTable *instance_dispatch_table(void *object);
-VkLayerDispatchTable *get_dispatch_table(device_table_map &map, void* object);
+VkLayerDispatchTable *get_dispatch_table(device_table_map &map, void *object);
-VkLayerInstanceDispatchTable *get_dispatch_table(instance_table_map &map, void* object);
+VkLayerInstanceDispatchTable *get_dispatch_table(instance_table_map &map,
+ void *object);
-VkLayerInstanceCreateInfo *get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func);
-VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo, VkLayerFunction func);
+VkLayerInstanceCreateInfo *
+get_chain_info(const VkInstanceCreateInfo *pCreateInfo, VkLayerFunction func);
+VkLayerDeviceCreateInfo *get_chain_info(const VkDeviceCreateInfo *pCreateInfo,
+ VkLayerFunction func);
void destroy_device_dispatch_table(dispatch_key key);
void destroy_instance_dispatch_table(dispatch_key key);
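
Note: get_dispatch_key above relies on every dispatchable handle pointing at an object whose first word is the loader-installed dispatch-table pointer, so objects created from the same parent share one key. A small illustrative sketch of that layout assumption follows; FakeTable and FakeObject are made-up types, not the loader's actual structures.

#include <iostream>

struct FakeTable { int dummy; };

struct FakeObject {
    FakeTable *table;  // first member, mirroring the assumed loader object layout
    int other_state;
};

typedef void *dispatch_key;

static dispatch_key get_key(const void *object) {
    // Same cast as get_dispatch_key: read the leading table pointer as the key.
    return (dispatch_key)*(FakeTable **)object;
}

int main() {
    FakeTable table = {0};
    FakeObject device = {&table, 1};
    FakeObject queue = {&table, 2};  // "child" sharing the parent's table
    std::cout << (get_key(&device) == get_key(&queue)) << "\n"; // 1: same key
    return 0;
}
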
diff --git a/layers/vk_layer_utils.cpp b/layers/vk_layer_utils.cpp
index db433dc..dd59186 100644
--- a/layers/vk_layer_utils.cpp
+++ b/layers/vk_layer_utils.cpp
@@ -34,215 +34,349 @@
#include "vulkan/vulkan.h"
#include "vk_layer_utils.h"
-
typedef struct _VULKAN_FORMAT_INFO {
- size_t size;
- uint32_t channel_count;
- VkFormatCompatibilityClass format_class;
+ size_t size;
+ uint32_t channel_count;
+ VkFormatCompatibilityClass format_class;
} VULKAN_FORMAT_INFO;
-
// Set up data structure with number of bytes and number of channels
// for each Vulkan format.
static const VULKAN_FORMAT_INFO vk_format_table[VK_FORMAT_RANGE_SIZE] = {
- { 0, 0, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_UNDEFINED]
- { 1, 2, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R4G4_UNORM_PACK8]
- { 2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R4G4B4A4_UNORM_PACK16]
- { 2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_B4G4R4A4_UNORM_PACK16]
- { 2, 3, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R5G6B5_UNORM_PACK16]
- { 2, 3, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_B5G6R5_UNORM_PACK16]
- { 2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R5G5B5A1_UNORM_PACK16]
- { 2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_B5G5R5A1_UNORM_PACK16]
- { 2, 4, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_A1R5G5B5_UNORM_PACK16]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R8_UNORM]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R8_SNORM]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R8_USCALED]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R8_SSCALED]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R8_UINT]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R8_SINT]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT }, // [VK_FORMAT_R8_SRGB]
- { 2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R8G8_UNORM]
- { 2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R8G8_SNORM]
- { 2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R8G8_USCALED]
- { 2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R8G8_SSCALED]
- { 2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R8G8_UINT]
- { 2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R8G8_SINT]
- { 2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R8G8_SRGB]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_R8G8B8_UNORM]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_R8G8B8_SNORM]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_R8G8B8_USCALED]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_R8G8B8_SSCALED]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_R8G8B8_UINT]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_R8G8B8_SINT]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_R8G8B8_SRGB]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_B8G8R8_UNORM]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_B8G8R8_SNORM]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_B8G8R8_USCALED]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_B8G8R8_SSCALED]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_B8G8R8_UINT]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_B8G8R8_SINT]
- { 3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT }, // [VK_FORMAT_B8G8R8_SRGB]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R8G8B8A8_UNORM]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R8G8B8A8_SNORM]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R8G8B8A8_USCALED]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R8G8B8A8_SSCALED]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R8G8B8A8_UINT]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R8G8B8A8_SINT]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R8G8B8A8_SRGB]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_UNORM]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_SNORM]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_USCALED]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_SSCALED]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_UINT]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_SINT]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_SRGB]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A8B8G8R8_UNORM_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A8B8G8R8_SNORM_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A8B8G8R8_USCALED_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A8B8G8R8_SSCALED_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A8B8G8R8_UINT_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A8B8G8R8_SINT_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B8G8R8A8_SRGB_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2R10G10B10_UNORM_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2R10G10B10_SNORM_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2R10G10B10_USCALED_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2R10G10B10_SSCALED_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2R10G10B10_UINT_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2R10G10B10_SINT_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2B10G10R10_UNORM_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2B10G10R10_SNORM_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2B10G10R10_USCALED_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2B10G10R10_SSCALED_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2B10G10R10_UINT_PACK32]
- { 4, 4, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_A2B10G10R10_SINT_PACK32]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R16_UNORM]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R16_SNORM]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R16_USCALED]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R16_SSCALED]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R16_UINT]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R16_SINT]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT }, // [VK_FORMAT_R16_SFLOAT]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R16G16_UNORM]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R16G16_SNORM]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R16G16_USCALED]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R16G16_SSCALED]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R16G16_UINT]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R16G16_SINT]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R16G16_SFLOAT]
- { 6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT }, // [VK_FORMAT_R16G16B16_UNORM]
- { 6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT }, // [VK_FORMAT_R16G16B16_SNORM]
- { 6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT }, // [VK_FORMAT_R16G16B16_USCALED]
- { 6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT }, // [VK_FORMAT_R16G16B16_SSCALED]
- { 6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT }, // [VK_FORMAT_R16G16B16_UINT]
- { 6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT }, // [VK_FORMAT_R16G16B16_SINT]
- { 6, 3, VK_FORMAT_COMPATIBILITY_CLASS_48_BIT }, // [VK_FORMAT_R16G16B16_SFLOAT]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R16G16B16A16_UNORM]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R16G16B16A16_SNORM]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R16G16B16A16_USCALED]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R16G16B16A16_SSCALED]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R16G16B16A16_UINT]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R16G16B16A16_SINT]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R16G16B16A16_SFLOAT]
- { 4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R32_UINT]
- { 4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R32_SINT]
- { 4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_R32_SFLOAT]
- { 8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R32G32_UINT]
- { 8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R32G32_SINT]
- { 8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R32G32_SFLOAT]
- { 12, 3, VK_FORMAT_COMPATIBILITY_CLASS_96_BIT }, // [VK_FORMAT_R32G32B32_UINT]
- { 12, 3, VK_FORMAT_COMPATIBILITY_CLASS_96_BIT }, // [VK_FORMAT_R32G32B32_SINT]
- { 12, 3, VK_FORMAT_COMPATIBILITY_CLASS_96_BIT }, // [VK_FORMAT_R32G32B32_SFLOAT]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT }, // [VK_FORMAT_R32G32B32A32_UINT]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT }, // [VK_FORMAT_R32G32B32A32_SINT]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT }, // [VK_FORMAT_R32G32B32A32_SFLOAT]
- { 8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R64_UINT]
- { 8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R64_SINT]
- { 8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT }, // [VK_FORMAT_R64_SFLOAT]
- { 16, 2, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT }, // [VK_FORMAT_R64G64_UINT]
- { 16, 2, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT }, // [VK_FORMAT_R64G64_SINT]
- { 16, 2, VK_FORMAT_COMPATIBILITY_CLASS_128_BIT }, // [VK_FORMAT_R64G64_SFLOAT]
- { 24, 3, VK_FORMAT_COMPATIBILITY_CLASS_192_BIT }, // [VK_FORMAT_R64G64B64_UINT]
- { 24, 3, VK_FORMAT_COMPATIBILITY_CLASS_192_BIT }, // [VK_FORMAT_R64G64B64_SINT]
- { 24, 3, VK_FORMAT_COMPATIBILITY_CLASS_192_BIT }, // [VK_FORMAT_R64G64B64_SFLOAT]
- { 32, 4, VK_FORMAT_COMPATIBILITY_CLASS_256_BIT }, // [VK_FORMAT_R64G64B64A64_UINT]
- { 32, 4, VK_FORMAT_COMPATIBILITY_CLASS_256_BIT }, // [VK_FORMAT_R64G64B64A64_SINT]
- { 32, 4, VK_FORMAT_COMPATIBILITY_CLASS_256_BIT }, // [VK_FORMAT_R64G64B64A64_SFLOAT]
- { 4, 3, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_B10G11R11_UFLOAT_PACK32]
- { 4, 3, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT }, // [VK_FORMAT_E5B9G9R9_UFLOAT_PACK32]
- { 2, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_D16_UNORM]
- { 3, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_X8_D24_UNORM_PACK32]
- { 4, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_D32_SFLOAT]
- { 1, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_S8_UINT]
- { 3, 2, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_D16_UNORM_S8_UINT]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_D24_UNORM_S8_UINT]
- { 4, 2, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT }, // [VK_FORMAT_D32_SFLOAT_S8_UINT]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT }, // [VK_FORMAT_BC1_RGB_UNORM_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT }, // [VK_FORMAT_BC1_RGB_SRGB_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT }, // [VK_FORMAT_BC1_RGBA_UNORM_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT }, // [VK_FORMAT_BC1_RGBA_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT }, // [VK_FORMAT_BC2_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT }, // [VK_FORMAT_BC2_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT }, // [VK_FORMAT_BC3_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT }, // [VK_FORMAT_BC3_SRGB_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT }, // [VK_FORMAT_BC4_UNORM_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT }, // [VK_FORMAT_BC4_SNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT }, // [VK_FORMAT_BC5_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT }, // [VK_FORMAT_BC5_SNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT }, // [VK_FORMAT_BC6H_UFLOAT_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT }, // [VK_FORMAT_BC6H_SFLOAT_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT }, // [VK_FORMAT_BC7_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT }, // [VK_FORMAT_BC7_SRGB_BLOCK]
- { 8, 3, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT }, // [VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK]
- { 8, 3, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT }, // [VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT }, // [VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT }, // [VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT }, // [VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK]
- { 8, 4, VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT }, // [VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK]
- { 8, 1, VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT }, // [VK_FORMAT_EAC_R11_UNORM_BLOCK]
- { 8, 1, VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT }, // [VK_FORMAT_EAC_R11_SNORM_BLOCK]
- { 16, 2, VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT }, // [VK_FORMAT_EAC_R11G11_UNORM_BLOCK]
- { 16, 2, VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT }, // [VK_FORMAT_EAC_R11G11_SNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT }, // [VK_FORMAT_ASTC_4x4_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT }, // [VK_FORMAT_ASTC_4x4_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT }, // [VK_FORMAT_ASTC_5x4_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT }, // [VK_FORMAT_ASTC_5x4_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT }, // [VK_FORMAT_ASTC_5x5_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT }, // [VK_FORMAT_ASTC_5x5_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT }, // [VK_FORMAT_ASTC_6x5_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT }, // [VK_FORMAT_ASTC_6x5_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT }, // [VK_FORMAT_ASTC_6x6_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT }, // [VK_FORMAT_ASTC_6x6_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT }, // [VK_FORMAT_ASTC_8x5_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT }, // [VK_FORMAT_ASTC_8x5_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT }, // [VK_FORMAT_ASTC_8x6_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT }, // [VK_FORMAT_ASTC_8x6_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT }, // [VK_FORMAT_ASTC_8x8_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT }, // [VK_FORMAT_ASTC_8x8_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT }, // [VK_FORMAT_ASTC_10x5_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT }, // [VK_FORMAT_ASTC_10x5_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT }, // [VK_FORMAT_ASTC_10x6_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT }, // [VK_FORMAT_ASTC_10x6_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT }, // [VK_FORMAT_ASTC_10x8_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT }, // [VK_FORMAT_ASTC_10x8_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT }, // [VK_FORMAT_ASTC_10x10_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT }, // [VK_FORMAT_ASTC_10x10_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT }, // [VK_FORMAT_ASTC_12x10_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT }, // [VK_FORMAT_ASTC_12x10_SRGB_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT }, // [VK_FORMAT_ASTC_12x12_UNORM_BLOCK]
- { 16, 4, VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT }, // [VK_FORMAT_ASTC_12x12_SRGB_BLOCK]
+ {0, 0, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_UNDEFINED]
+ {1, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R4G4_UNORM_PACK8]
+ {2, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R4G4B4A4_UNORM_PACK16]
+ {2, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_B4G4R4A4_UNORM_PACK16]
+ {2, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R5G6B5_UNORM_PACK16]
+ {2, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_B5G6R5_UNORM_PACK16]
+ {2, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R5G5B5A1_UNORM_PACK16]
+ {2, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_B5G5R5A1_UNORM_PACK16]
+ {2, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_A1R5G5B5_UNORM_PACK16]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R8_UNORM]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R8_SNORM]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R8_USCALED]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R8_SSCALED]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R8_UINT]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R8_SINT]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_8_BIT}, // [VK_FORMAT_R8_SRGB]
+ {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R8G8_UNORM]
+ {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R8G8_SNORM]
+ {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R8G8_USCALED]
+ {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R8G8_SSCALED]
+ {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R8G8_UINT]
+ {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R8G8_SINT]
+ {2, 2, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R8G8_SRGB]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_R8G8B8_UNORM]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_R8G8B8_SNORM]
+ {3, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_R8G8B8_USCALED]
+ {3, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_R8G8B8_SSCALED]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_R8G8B8_UINT]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_R8G8B8_SINT]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_R8G8B8_SRGB]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_B8G8R8_UNORM]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_B8G8R8_SNORM]
+ {3, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_B8G8R8_USCALED]
+ {3, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_B8G8R8_SSCALED]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_B8G8R8_UINT]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_B8G8R8_SINT]
+ {3, 3, VK_FORMAT_COMPATIBILITY_CLASS_24_BIT}, // [VK_FORMAT_B8G8R8_SRGB]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R8G8B8A8_UNORM]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R8G8B8A8_SNORM]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R8G8B8A8_USCALED]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R8G8B8A8_SSCALED]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R8G8B8A8_UINT]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R8G8B8A8_SINT]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R8G8B8A8_SRGB]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_UNORM]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_SNORM]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_USCALED]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_SSCALED]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_UINT]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_SINT]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_SRGB]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A8B8G8R8_UNORM_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A8B8G8R8_SNORM_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A8B8G8R8_USCALED_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A8B8G8R8_SSCALED_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A8B8G8R8_UINT_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A8B8G8R8_SINT_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B8G8R8A8_SRGB_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2R10G10B10_UNORM_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2R10G10B10_SNORM_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2R10G10B10_USCALED_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2R10G10B10_SSCALED_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2R10G10B10_UINT_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2R10G10B10_SINT_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2B10G10R10_UNORM_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2B10G10R10_SNORM_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2B10G10R10_USCALED_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2B10G10R10_SSCALED_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2B10G10R10_UINT_PACK32]
+ {4, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_A2B10G10R10_SINT_PACK32]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R16_UNORM]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R16_SNORM]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R16_USCALED]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R16_SSCALED]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R16_UINT]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R16_SINT]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_16_BIT}, // [VK_FORMAT_R16_SFLOAT]
+ {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R16G16_UNORM]
+ {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R16G16_SNORM]
+ {4, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R16G16_USCALED]
+ {4, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R16G16_SSCALED]
+ {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R16G16_UINT]
+ {4, 2, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R16G16_SINT]
+ {4, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R16G16_SFLOAT]
+ {6, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}, // [VK_FORMAT_R16G16B16_UNORM]
+ {6, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}, // [VK_FORMAT_R16G16B16_SNORM]
+ {6, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}, // [VK_FORMAT_R16G16B16_USCALED]
+ {6, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}, // [VK_FORMAT_R16G16B16_SSCALED]
+ {6, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}, // [VK_FORMAT_R16G16B16_UINT]
+ {6, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}, // [VK_FORMAT_R16G16B16_SINT]
+ {6, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT}, // [VK_FORMAT_R16G16B16_SFLOAT]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R16G16B16A16_UNORM]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R16G16B16A16_SNORM]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R16G16B16A16_USCALED]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R16G16B16A16_SSCALED]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R16G16B16A16_UINT]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R16G16B16A16_SINT]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R16G16B16A16_SFLOAT]
+ {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R32_UINT]
+ {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R32_SINT]
+ {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_R32_SFLOAT]
+ {8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R32G32_UINT]
+ {8, 2, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R32G32_SINT]
+ {8, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R32G32_SFLOAT]
+ {12, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_96_BIT}, // [VK_FORMAT_R32G32B32_UINT]
+ {12, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_96_BIT}, // [VK_FORMAT_R32G32B32_SINT]
+ {12, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_96_BIT}, // [VK_FORMAT_R32G32B32_SFLOAT]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}, // [VK_FORMAT_R32G32B32A32_UINT]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}, // [VK_FORMAT_R32G32B32A32_SINT]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}, // [VK_FORMAT_R32G32B32A32_SFLOAT]
+ {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R64_UINT]
+ {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R64_SINT]
+ {8, 1, VK_FORMAT_COMPATIBILITY_CLASS_64_BIT}, // [VK_FORMAT_R64_SFLOAT]
+ {16, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}, // [VK_FORMAT_R64G64_UINT]
+ {16, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}, // [VK_FORMAT_R64G64_SINT]
+ {16, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_128_BIT}, // [VK_FORMAT_R64G64_SFLOAT]
+ {24, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_192_BIT}, // [VK_FORMAT_R64G64B64_UINT]
+ {24, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_192_BIT}, // [VK_FORMAT_R64G64B64_SINT]
+ {24, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_192_BIT}, // [VK_FORMAT_R64G64B64_SFLOAT]
+ {32, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_256_BIT}, // [VK_FORMAT_R64G64B64A64_UINT]
+ {32, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_256_BIT}, // [VK_FORMAT_R64G64B64A64_SINT]
+ {32, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_256_BIT}, // [VK_FORMAT_R64G64B64A64_SFLOAT]
+ {4, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_B10G11R11_UFLOAT_PACK32]
+ {4, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT}, // [VK_FORMAT_E5B9G9R9_UFLOAT_PACK32]
+ {2, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_D16_UNORM]
+ {3, 1,
+ VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_X8_D24_UNORM_PACK32]
+ {4, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_D32_SFLOAT]
+ {1, 1, VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_S8_UINT]
+ {3, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_D16_UNORM_S8_UINT]
+ {4, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_D24_UNORM_S8_UINT]
+ {4, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT}, // [VK_FORMAT_D32_SFLOAT_S8_UINT]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT}, // [VK_FORMAT_BC1_RGB_UNORM_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT}, // [VK_FORMAT_BC1_RGB_SRGB_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT}, // [VK_FORMAT_BC1_RGBA_UNORM_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT}, // [VK_FORMAT_BC1_RGBA_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT}, // [VK_FORMAT_BC2_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT}, // [VK_FORMAT_BC2_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT}, // [VK_FORMAT_BC3_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT}, // [VK_FORMAT_BC3_SRGB_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT}, // [VK_FORMAT_BC4_UNORM_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT}, // [VK_FORMAT_BC4_SNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT}, // [VK_FORMAT_BC5_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT}, // [VK_FORMAT_BC5_SNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT}, // [VK_FORMAT_BC6H_UFLOAT_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT}, // [VK_FORMAT_BC6H_SFLOAT_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT}, // [VK_FORMAT_BC7_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT}, // [VK_FORMAT_BC7_SRGB_BLOCK]
+ {8, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT}, // [VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK]
+ {8, 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT}, // [VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT}, // [VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT}, // [VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT}, // [VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK]
+ {8, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT}, // [VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK]
+ {8, 1,
+ VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT}, // [VK_FORMAT_EAC_R11_UNORM_BLOCK]
+ {8, 1,
+ VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT}, // [VK_FORMAT_EAC_R11_SNORM_BLOCK]
+ {16, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT}, // [VK_FORMAT_EAC_R11G11_UNORM_BLOCK]
+ {16, 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT}, // [VK_FORMAT_EAC_R11G11_SNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT}, // [VK_FORMAT_ASTC_4x4_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT}, // [VK_FORMAT_ASTC_4x4_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT}, // [VK_FORMAT_ASTC_5x4_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT}, // [VK_FORMAT_ASTC_5x4_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT}, // [VK_FORMAT_ASTC_5x5_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT}, // [VK_FORMAT_ASTC_5x5_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT}, // [VK_FORMAT_ASTC_6x5_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT}, // [VK_FORMAT_ASTC_6x5_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT}, // [VK_FORMAT_ASTC_6x6_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT}, // [VK_FORMAT_ASTC_6x6_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT}, // [VK_FORMAT_ASTC_8x5_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT}, // [VK_FORMAT_ASTC_8x5_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT}, // [VK_FORMAT_ASTC_8x6_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT}, // [VK_FORMAT_ASTC_8x6_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT}, // [VK_FORMAT_ASTC_8x8_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT}, // [VK_FORMAT_ASTC_8x8_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT}, // [VK_FORMAT_ASTC_10x5_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT}, // [VK_FORMAT_ASTC_10x5_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT}, // [VK_FORMAT_ASTC_10x6_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT}, // [VK_FORMAT_ASTC_10x6_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT}, // [VK_FORMAT_ASTC_10x8_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT}, // [VK_FORMAT_ASTC_10x8_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT}, // [VK_FORMAT_ASTC_10x10_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT}, // [VK_FORMAT_ASTC_10x10_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT}, // [VK_FORMAT_ASTC_12x10_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT}, // [VK_FORMAT_ASTC_12x10_SRGB_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT}, // [VK_FORMAT_ASTC_12x12_UNORM_BLOCK]
+ {16, 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT}, // [VK_FORMAT_ASTC_12x12_SRGB_BLOCK]
};
// Return true if format is a depth or stencil format
-bool vk_format_is_depth_or_stencil(VkFormat format)
-{
+bool vk_format_is_depth_or_stencil(VkFormat format) {
return (vk_format_is_depth_and_stencil(format) ||
- vk_format_is_depth_only(format) ||
+ vk_format_is_depth_only(format) ||
vk_format_is_stencil_only(format));
}
// Return true if format contains depth and stencil information
-bool vk_format_is_depth_and_stencil(VkFormat format)
-{
+bool vk_format_is_depth_and_stencil(VkFormat format) {
bool is_ds = false;
switch (format) {
@@ -258,14 +392,12 @@
}
// Return true if format is a stencil-only format
-bool vk_format_is_stencil_only(VkFormat format)
-{
+bool vk_format_is_stencil_only(VkFormat format) {
return (format == VK_FORMAT_S8_UINT);
}
// Return true if format is a depth-only format
-bool vk_format_is_depth_only(VkFormat format)
-{
+bool vk_format_is_depth_only(VkFormat format) {
bool is_depth = false;
switch (format) {
@@ -282,8 +414,7 @@
}
// Return true if format is of type UNORM
-bool vk_format_is_norm(VkFormat format)
-{
+bool vk_format_is_norm(VkFormat format) {
bool is_norm = false;
switch (format) {
@@ -357,16 +488,13 @@
return is_norm;
};
-
// Return true if format is an integer format
-bool vk_format_is_int(VkFormat format)
-{
+bool vk_format_is_int(VkFormat format) {
return (vk_format_is_sint(format) || vk_format_is_uint(format));
}
// Return true if format is an unsigned integer format
-bool vk_format_is_uint(VkFormat format)
-{
+bool vk_format_is_uint(VkFormat format) {
bool is_uint = false;
switch (format) {
@@ -401,8 +529,7 @@
}
// Return true if format is a signed integer format
-bool vk_format_is_sint(VkFormat format)
-{
+bool vk_format_is_sint(VkFormat format) {
bool is_sint = false;
switch (format) {
@@ -437,8 +564,7 @@
}
// Return true if format is a floating-point format
-bool vk_format_is_float(VkFormat format)
-{
+bool vk_format_is_float(VkFormat format) {
bool is_float = false;
switch (format) {
@@ -468,8 +594,7 @@
}
// Return true if format is in the SRGB colorspace
-bool vk_format_is_srgb(VkFormat format)
-{
+bool vk_format_is_srgb(VkFormat format) {
bool is_srgb = false;
switch (format) {
@@ -511,8 +636,7 @@
}
// Return true if format is compressed
-bool vk_format_is_compressed(VkFormat format)
-{
+bool vk_format_is_compressed(VkFormat format) {
switch (format) {
case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
@@ -573,31 +697,25 @@
}
// Return format class of the specified format
-VkFormatCompatibilityClass vk_format_get_compatibility_class(VkFormat format)
-{
+VkFormatCompatibilityClass vk_format_get_compatibility_class(VkFormat format) {
return vk_format_table[format].format_class;
}
// Return size, in bytes, of a pixel of the specified format
-size_t vk_format_get_size(VkFormat format)
-{
+size_t vk_format_get_size(VkFormat format) {
return vk_format_table[format].size;
}
// Return the number of channels for a given format
-unsigned int vk_format_get_channel_count(VkFormat format)
-{
+unsigned int vk_format_get_channel_count(VkFormat format) {
return vk_format_table[format].channel_count;
}
// Perform a zero-tolerant modulo operation
-VkDeviceSize vk_safe_modulo(VkDeviceSize dividend, VkDeviceSize divisor)
-{
+VkDeviceSize vk_safe_modulo(VkDeviceSize dividend, VkDeviceSize divisor) {
VkDeviceSize result = 0;
if (divisor != 0) {
result = dividend % divisor;
}
return result;
}
-
-
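
Note: vk_format_get_size, vk_format_get_channel_count and vk_format_get_compatibility_class above are plain lookups into vk_format_table indexed by the VkFormat value, and vk_safe_modulo guards against a zero divisor. A tiny standalone sketch of both follows; the three-entry table is made up for illustration and is not the real vk_format_table.

#include <cstddef>
#include <cstdint>
#include <iostream>

struct FormatInfo { size_t size; uint32_t channel_count; };

static const FormatInfo format_table[] = {
    {0, 0}, // index 0: "undefined" format
    {1, 2}, // index 1: 1 byte, 2 channels
    {2, 4}, // index 2: 2 bytes, 4 channels
};

size_t format_size(unsigned format) { return format_table[format].size; }

uint64_t safe_modulo(uint64_t dividend, uint64_t divisor) {
    // Matches vk_safe_modulo: a zero divisor yields 0 instead of undefined behavior.
    return (divisor != 0) ? dividend % divisor : 0;
}

int main() {
    std::cout << format_size(2) << "\n";     // 2 bytes per texel for the made-up entry
    std::cout << safe_modulo(10, 4) << "\n"; // 2
    std::cout << safe_modulo(10, 0) << "\n"; // 0, no divide-by-zero
    return 0;
}
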
diff --git a/layers/vk_layer_utils.h b/layers/vk_layer_utils.h
index 9c44328..7ac5812 100644
--- a/layers/vk_layer_utils.h
+++ b/layers/vk_layer_utils.h
@@ -42,57 +42,55 @@
#endif
typedef enum VkFormatCompatibilityClass {
- VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT = 0,
- VK_FORMAT_COMPATIBILITY_CLASS_8_BIT = 1,
- VK_FORMAT_COMPATIBILITY_CLASS_16_BIT = 2,
- VK_FORMAT_COMPATIBILITY_CLASS_24_BIT = 3,
- VK_FORMAT_COMPATIBILITY_CLASS_32_BIT = 4,
- VK_FORMAT_COMPATIBILITY_CLASS_48_BIT = 5,
- VK_FORMAT_COMPATIBILITY_CLASS_64_BIT = 6,
- VK_FORMAT_COMPATIBILITY_CLASS_96_BIT = 7,
- VK_FORMAT_COMPATIBILITY_CLASS_128_BIT = 8,
- VK_FORMAT_COMPATIBILITY_CLASS_192_BIT = 9,
- VK_FORMAT_COMPATIBILITY_CLASS_256_BIT = 10,
- VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT = 11,
- VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT = 12,
- VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT = 13,
- VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT = 14,
- VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT = 15,
- VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT = 16,
- VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT = 17,
- VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT = 18,
- VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT = 19,
- VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT = 20,
+ VK_FORMAT_COMPATIBILITY_CLASS_NONE_BIT = 0,
+ VK_FORMAT_COMPATIBILITY_CLASS_8_BIT = 1,
+ VK_FORMAT_COMPATIBILITY_CLASS_16_BIT = 2,
+ VK_FORMAT_COMPATIBILITY_CLASS_24_BIT = 3,
+ VK_FORMAT_COMPATIBILITY_CLASS_32_BIT = 4,
+ VK_FORMAT_COMPATIBILITY_CLASS_48_BIT = 5,
+ VK_FORMAT_COMPATIBILITY_CLASS_64_BIT = 6,
+ VK_FORMAT_COMPATIBILITY_CLASS_96_BIT = 7,
+ VK_FORMAT_COMPATIBILITY_CLASS_128_BIT = 8,
+ VK_FORMAT_COMPATIBILITY_CLASS_192_BIT = 9,
+ VK_FORMAT_COMPATIBILITY_CLASS_256_BIT = 10,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGB_BIT = 11,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC1_RGBA_BIT = 12,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC2_BIT = 13,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC3_BIT = 14,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC4_BIT = 15,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC5_BIT = 16,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC6H_BIT = 17,
+ VK_FORMAT_COMPATIBILITY_CLASS_BC7_BIT = 18,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGB_BIT = 19,
+ VK_FORMAT_COMPATIBILITY_CLASS_ETC2_RGBA_BIT = 20,
VK_FORMAT_COMPATIBILITY_CLASS_ETC2_EAC_RGBA_BIT = 21,
- VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT = 22,
- VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT = 23,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT = 24,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT = 25,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT = 26,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT = 27,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT = 28,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT = 29,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT = 20,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT = 31,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT = 32,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT = 33,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT = 34,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT = 35,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT = 36,
- VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT = 37,
- VK_FORMAT_COMPATIBILITY_CLASS_D16_BIT = 38,
- VK_FORMAT_COMPATIBILITY_CLASS_D24_BIT = 39,
- VK_FORMAT_COMPATIBILITY_CLASS_D32_BIT = 30,
- VK_FORMAT_COMPATIBILITY_CLASS_S8_BIT = 41,
- VK_FORMAT_COMPATIBILITY_CLASS_D16S8_BIT = 42,
- VK_FORMAT_COMPATIBILITY_CLASS_D24S8_BIT = 43,
- VK_FORMAT_COMPATIBILITY_CLASS_D32S8_BIT = 44,
- VK_FORMAT_COMPATIBILITY_CLASS_MAX_ENUM = 45
+ VK_FORMAT_COMPATIBILITY_CLASS_EAC_R_BIT = 22,
+ VK_FORMAT_COMPATIBILITY_CLASS_EAC_RG_BIT = 23,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_4X4_BIT = 24,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X4_BIT = 25,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_5X5_BIT = 26,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X5_BIT = 27,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_6X6_BIT = 28,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X5_BIT = 29,
+    VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X6_BIT = 30,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_8X8_BIT = 31,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X5_BIT = 32,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X6_BIT = 33,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X8_BIT = 34,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_10X10_BIT = 35,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X10_BIT = 36,
+ VK_FORMAT_COMPATIBILITY_CLASS_ASTC_12X12_BIT = 37,
+ VK_FORMAT_COMPATIBILITY_CLASS_D16_BIT = 38,
+ VK_FORMAT_COMPATIBILITY_CLASS_D24_BIT = 39,
+    VK_FORMAT_COMPATIBILITY_CLASS_D32_BIT = 40,
+ VK_FORMAT_COMPATIBILITY_CLASS_S8_BIT = 41,
+ VK_FORMAT_COMPATIBILITY_CLASS_D16S8_BIT = 42,
+ VK_FORMAT_COMPATIBILITY_CLASS_D24S8_BIT = 43,
+ VK_FORMAT_COMPATIBILITY_CLASS_D32S8_BIT = 44,
+ VK_FORMAT_COMPATIBILITY_CLASS_MAX_ENUM = 45
} VkFormatCompatibilityClass;
-
-static inline bool vk_format_is_undef(VkFormat format)
-{
+static inline bool vk_format_is_undef(VkFormat format) {
return (format == VK_FORMAT_UNDEFINED);
}
@@ -101,25 +99,24 @@
bool vk_format_is_depth_only(VkFormat format);
bool vk_format_is_stencil_only(VkFormat format);
-static inline bool vk_format_is_color(VkFormat format)
-{
- return !(vk_format_is_undef(format) || vk_format_is_depth_or_stencil(format));
+static inline bool vk_format_is_color(VkFormat format) {
+ return !(vk_format_is_undef(format) ||
+ vk_format_is_depth_or_stencil(format));
}
-bool vk_format_is_norm(VkFormat format);
-bool vk_format_is_int(VkFormat format);
-bool vk_format_is_sint(VkFormat format);
-bool vk_format_is_uint(VkFormat format);
-bool vk_format_is_float(VkFormat format);
-bool vk_format_is_srgb(VkFormat format);
-bool vk_format_is_compressed(VkFormat format);
-size_t vk_format_get_size(VkFormat format);
-unsigned int vk_format_get_channel_count(VkFormat format);
+bool vk_format_is_norm(VkFormat format);
+bool vk_format_is_int(VkFormat format);
+bool vk_format_is_sint(VkFormat format);
+bool vk_format_is_uint(VkFormat format);
+bool vk_format_is_float(VkFormat format);
+bool vk_format_is_srgb(VkFormat format);
+bool vk_format_is_compressed(VkFormat format);
+size_t vk_format_get_size(VkFormat format);
+unsigned int vk_format_get_channel_count(VkFormat format);
VkFormatCompatibilityClass vk_format_get_compatibility_class(VkFormat format);
-VkDeviceSize vk_safe_modulo(VkDeviceSize dividend, VkDeviceSize divisor);
+VkDeviceSize vk_safe_modulo(VkDeviceSize dividend, VkDeviceSize divisor);
-static inline int u_ffs(int val)
-{
+static inline int u_ffs(int val) {
#ifdef WIN32
unsigned long bit_pos = 0;
if (_BitScanForward(&bit_pos, val) != 0) {
@@ -134,5 +131,3 @@
#ifdef __cplusplus
}
#endif
-
-
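A short usage sketch for the inline helpers declared in vk_layer_utils.h (illustrative only, not part of this change; the function names lowest_supported_queue_family and default_aspect_mask are assumptions made for the example):

#include <vulkan/vulkan.h>
#include "vk_layer_utils.h"

// u_ffs() follows ffs() semantics: 1-based index of the lowest set bit,
// 0 when no bit is set. This hypothetical helper converts that to a
// 0-based queue family index (-1 when the mask is empty).
static int lowest_supported_queue_family(uint32_t support_mask) {
    return u_ffs((int)support_mask) - 1;
}

// Hypothetical helper: derive a default aspect mask from the format
// classification helpers (color vs. depth-only/stencil-only/combined).
static VkImageAspectFlags default_aspect_mask(VkFormat format) {
    if (vk_format_is_color(format))
        return VK_IMAGE_ASPECT_COLOR_BIT;
    VkImageAspectFlags aspect = 0;
    if (vk_format_is_depth_or_stencil(format)) {
        if (!vk_format_is_stencil_only(format))
            aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
        if (!vk_format_is_depth_only(format))
            aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;
    }
    return aspect;
}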