repo: Clang-format LVL source files using the Google style
Switch clang-format standard from the LLVM style to the
Google style for more consistency.
Change-Id: I247c4abc275d7873a91522e1e234198adaa24033
diff --git a/layers/buffer_validation.h b/layers/buffer_validation.h
index b8c094d..a5fd396 100644
--- a/layers/buffer_validation.h
+++ b/layers/buffer_validation.h
@@ -31,4 +31,4 @@
std::unordered_map<ImageSubresourcePair, IMAGE_LAYOUT_NODE> *imageLayoutMap,
const VkImageCreateInfo *pCreateInfo, VkImage *pImage);
-#endif // CORE_VALIDATION_BUFFER_VALIDATION_H_
+#endif // CORE_VALIDATION_BUFFER_VALIDATION_H_
diff --git a/layers/core_validation.cpp b/layers/core_validation.cpp
index e682aed..3feff50 100644
--- a/layers/core_validation.cpp
+++ b/layers/core_validation.cpp
@@ -72,10 +72,10 @@
#include <android/log.h>
#define LOGCONSOLE(...) ((void)__android_log_print(ANDROID_LOG_INFO, "DS", __VA_ARGS__))
#else
-#define LOGCONSOLE(...) \
- { \
- printf(__VA_ARGS__); \
- printf("\n"); \
+#define LOGCONSOLE(...) \
+ { \
+ printf(__VA_ARGS__); \
+ printf("\n"); \
}
#endif
@@ -139,7 +139,7 @@
VkLayerDispatchTable dispatch_table;
devExts device_extensions = {};
- unordered_set<VkQueue> queues; // All queues under given device
+ unordered_set<VkQueue> queues; // All queues under given device
// Global set of all cmdBuffers that are inFlight on this device
unordered_set<VkCommandBuffer> globalInFlightCmdBuffers;
// Layer specific data
@@ -171,7 +171,7 @@
VkDevice device = VK_NULL_HANDLE;
VkPhysicalDevice physical_device = VK_NULL_HANDLE;
- instance_layer_data *instance_data = nullptr; // from device to enclosing instance
+ instance_layer_data *instance_data = nullptr; // from device to enclosing instance
VkPhysicalDeviceFeatures enabled_features = {};
// Device specific data
@@ -190,7 +190,8 @@
"VK_LAYER_LUNARG_core_validation", VK_LAYER_API_VERSION, 1, "LunarG Validation Layer",
};
-template <class TCreateInfo> void ValidateLayerOrdering(const TCreateInfo &createInfo) {
+template <class TCreateInfo>
+void ValidateLayerOrdering(const TCreateInfo &createInfo) {
bool foundLayer = false;
for (uint32_t i = 0; i < createInfo.enabledLayerCount; ++i) {
if (!strcmp(createInfo.ppEnabledLayerNames[i], global_layer.layerName)) {
@@ -235,13 +236,13 @@
bool operator!=(spirv_inst_iter const &other) { return it != other.it; }
- spirv_inst_iter operator++(int) { // x++
+ spirv_inst_iter operator++(int) { // x++
spirv_inst_iter ii = *this;
it += len();
return ii;
}
- spirv_inst_iter operator++() { // ++x;
+ spirv_inst_iter operator++() { // ++x;
it += len();
return *this;
}
@@ -261,13 +262,12 @@
shader_module(VkShaderModuleCreateInfo const *pCreateInfo)
: words((uint32_t *)pCreateInfo->pCode, (uint32_t *)pCreateInfo->pCode + pCreateInfo->codeSize / sizeof(uint32_t)),
def_index() {
-
build_def_index(this);
}
// Expose begin() / end() to enable range-based for
- spirv_inst_iter begin() const { return spirv_inst_iter(words.begin(), words.begin() + 5); } // First insn
- spirv_inst_iter end() const { return spirv_inst_iter(words.begin(), words.end()); } // Just past last insn
+ spirv_inst_iter begin() const { return spirv_inst_iter(words.begin(), words.begin() + 5); } // First insn
+ spirv_inst_iter end() const { return spirv_inst_iter(words.begin(), words.end()); } // Just past last insn
// Given an offset into the module, produce an iterator there.
spirv_inst_iter at(unsigned offset) const { return spirv_inst_iter(words.begin(), words.begin() + offset); }
@@ -408,12 +408,12 @@
// Return ptr to memory binding for given handle of specified type
static BINDABLE *GetObjectMemBinding(layer_data *my_data, uint64_t handle, VkDebugReportObjectTypeEXT type) {
switch (type) {
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- return getImageState(my_data, VkImage(handle));
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- return getBufferState(my_data, VkBuffer(handle));
- default:
- break;
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
+ return getImageState(my_data, VkImage(handle));
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
+ return getBufferState(my_data, VkBuffer(handle));
+ default:
+ break;
}
return nullptr;
}
@@ -493,40 +493,40 @@
// TODO: Unify string helper functions, this should really come out of a string helper if not there already
static const char *object_type_to_string(VkDebugReportObjectTypeEXT type) {
switch (type) {
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
- return "image";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
- return "buffer";
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
- return "image view";
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
- return "buffer view";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
- return "swapchain";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT:
- return "descriptor set";
- case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
- return "framebuffer";
- case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
- return "event";
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
- return "query pool";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
- return "descriptor pool";
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT:
- return "command pool";
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT:
- return "pipeline";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT:
- return "sampler";
- case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
- return "renderpass";
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
- return "device memory";
- case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT:
- return "semaphore";
- default:
- return "unknown";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT:
+ return "image";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT:
+ return "buffer";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT:
+ return "image view";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT:
+ return "buffer view";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT:
+ return "swapchain";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT:
+ return "descriptor set";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT:
+ return "framebuffer";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT:
+ return "event";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT:
+ return "query pool";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT:
+ return "descriptor pool";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT:
+ return "command pool";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT:
+ return "pipeline";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT:
+ return "sampler";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT:
+ return "renderpass";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT:
+ return "device memory";
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT:
+ return "semaphore";
+ default:
+ return "unknown";
}
}
@@ -596,7 +596,6 @@
// Skip validation if this image was created through WSI
if (mem != MEMTRACKER_SWAP_CHAIN_IMAGE_KEY) {
-
// First update CB binding in MemObj mini CB list
DEVICE_MEM_INFO *pMemInfo = getMemObjInfo(dev_data, mem);
if (pMemInfo) {
@@ -719,7 +718,7 @@
if (mem_binding) {
if (!mem_binding->sparse) {
skip = ClearMemoryObjectBinding(dev_data, handle, type, mem_binding->binding.mem);
- } else { // Sparse, clear all bindings
+ } else { // Sparse, clear all bindings
for (auto &sparse_mem_binding : mem_binding->sparse_bindings) {
skip |= ClearMemoryObjectBinding(dev_data, handle, type, sparse_mem_binding.mem);
}
@@ -734,15 +733,15 @@
bool result = false;
if (VK_NULL_HANDLE == mem) {
result = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, handle,
- __LINE__, error_code, "MEM",
- "%s: Vk%s object 0x%" PRIxLEAST64 " used with no memory bound. Memory should be bound by calling "
- "vkBind%sMemory(). %s",
+ __LINE__, error_code, "MEM", "%s: Vk%s object 0x%" PRIxLEAST64
+ " used with no memory bound. Memory should be bound by calling "
+ "vkBind%sMemory(). %s",
api_name, type_name, handle, type_name, validation_error_map[error_code]);
} else if (MEMORY_UNBOUND == mem) {
result = log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, handle,
- __LINE__, error_code, "MEM",
- "%s: Vk%s object 0x%" PRIxLEAST64 " used with no memory bound and previously bound memory was freed. "
- "Memory must not be freed prior to this operation. %s",
+ __LINE__, error_code, "MEM", "%s: Vk%s object 0x%" PRIxLEAST64
+ " used with no memory bound and previously bound memory was freed. "
+ "Memory must not be freed prior to this operation. %s",
api_name, type_name, handle, validation_error_map[error_code]);
}
return result;
@@ -856,98 +855,98 @@
// Return a string representation of CMD_TYPE enum
static string cmdTypeToString(CMD_TYPE cmd) {
switch (cmd) {
- case CMD_BINDPIPELINE:
- return "CMD_BINDPIPELINE";
- case CMD_BINDPIPELINEDELTA:
- return "CMD_BINDPIPELINEDELTA";
- case CMD_SETVIEWPORTSTATE:
- return "CMD_SETVIEWPORTSTATE";
- case CMD_SETLINEWIDTHSTATE:
- return "CMD_SETLINEWIDTHSTATE";
- case CMD_SETDEPTHBIASSTATE:
- return "CMD_SETDEPTHBIASSTATE";
- case CMD_SETBLENDSTATE:
- return "CMD_SETBLENDSTATE";
- case CMD_SETDEPTHBOUNDSSTATE:
- return "CMD_SETDEPTHBOUNDSSTATE";
- case CMD_SETSTENCILREADMASKSTATE:
- return "CMD_SETSTENCILREADMASKSTATE";
- case CMD_SETSTENCILWRITEMASKSTATE:
- return "CMD_SETSTENCILWRITEMASKSTATE";
- case CMD_SETSTENCILREFERENCESTATE:
- return "CMD_SETSTENCILREFERENCESTATE";
- case CMD_BINDDESCRIPTORSETS:
- return "CMD_BINDDESCRIPTORSETS";
- case CMD_BINDINDEXBUFFER:
- return "CMD_BINDINDEXBUFFER";
- case CMD_BINDVERTEXBUFFER:
- return "CMD_BINDVERTEXBUFFER";
- case CMD_DRAW:
- return "CMD_DRAW";
- case CMD_DRAWINDEXED:
- return "CMD_DRAWINDEXED";
- case CMD_DRAWINDIRECT:
- return "CMD_DRAWINDIRECT";
- case CMD_DRAWINDEXEDINDIRECT:
- return "CMD_DRAWINDEXEDINDIRECT";
- case CMD_DISPATCH:
- return "CMD_DISPATCH";
- case CMD_DISPATCHINDIRECT:
- return "CMD_DISPATCHINDIRECT";
- case CMD_COPYBUFFER:
- return "CMD_COPYBUFFER";
- case CMD_COPYIMAGE:
- return "CMD_COPYIMAGE";
- case CMD_BLITIMAGE:
- return "CMD_BLITIMAGE";
- case CMD_COPYBUFFERTOIMAGE:
- return "CMD_COPYBUFFERTOIMAGE";
- case CMD_COPYIMAGETOBUFFER:
- return "CMD_COPYIMAGETOBUFFER";
- case CMD_CLONEIMAGEDATA:
- return "CMD_CLONEIMAGEDATA";
- case CMD_UPDATEBUFFER:
- return "CMD_UPDATEBUFFER";
- case CMD_FILLBUFFER:
- return "CMD_FILLBUFFER";
- case CMD_CLEARCOLORIMAGE:
- return "CMD_CLEARCOLORIMAGE";
- case CMD_CLEARATTACHMENTS:
- return "CMD_CLEARCOLORATTACHMENT";
- case CMD_CLEARDEPTHSTENCILIMAGE:
- return "CMD_CLEARDEPTHSTENCILIMAGE";
- case CMD_RESOLVEIMAGE:
- return "CMD_RESOLVEIMAGE";
- case CMD_SETEVENT:
- return "CMD_SETEVENT";
- case CMD_RESETEVENT:
- return "CMD_RESETEVENT";
- case CMD_WAITEVENTS:
- return "CMD_WAITEVENTS";
- case CMD_PIPELINEBARRIER:
- return "CMD_PIPELINEBARRIER";
- case CMD_BEGINQUERY:
- return "CMD_BEGINQUERY";
- case CMD_ENDQUERY:
- return "CMD_ENDQUERY";
- case CMD_RESETQUERYPOOL:
- return "CMD_RESETQUERYPOOL";
- case CMD_COPYQUERYPOOLRESULTS:
- return "CMD_COPYQUERYPOOLRESULTS";
- case CMD_WRITETIMESTAMP:
- return "CMD_WRITETIMESTAMP";
- case CMD_INITATOMICCOUNTERS:
- return "CMD_INITATOMICCOUNTERS";
- case CMD_LOADATOMICCOUNTERS:
- return "CMD_LOADATOMICCOUNTERS";
- case CMD_SAVEATOMICCOUNTERS:
- return "CMD_SAVEATOMICCOUNTERS";
- case CMD_BEGINRENDERPASS:
- return "CMD_BEGINRENDERPASS";
- case CMD_ENDRENDERPASS:
- return "CMD_ENDRENDERPASS";
- default:
- return "UNKNOWN";
+ case CMD_BINDPIPELINE:
+ return "CMD_BINDPIPELINE";
+ case CMD_BINDPIPELINEDELTA:
+ return "CMD_BINDPIPELINEDELTA";
+ case CMD_SETVIEWPORTSTATE:
+ return "CMD_SETVIEWPORTSTATE";
+ case CMD_SETLINEWIDTHSTATE:
+ return "CMD_SETLINEWIDTHSTATE";
+ case CMD_SETDEPTHBIASSTATE:
+ return "CMD_SETDEPTHBIASSTATE";
+ case CMD_SETBLENDSTATE:
+ return "CMD_SETBLENDSTATE";
+ case CMD_SETDEPTHBOUNDSSTATE:
+ return "CMD_SETDEPTHBOUNDSSTATE";
+ case CMD_SETSTENCILREADMASKSTATE:
+ return "CMD_SETSTENCILREADMASKSTATE";
+ case CMD_SETSTENCILWRITEMASKSTATE:
+ return "CMD_SETSTENCILWRITEMASKSTATE";
+ case CMD_SETSTENCILREFERENCESTATE:
+ return "CMD_SETSTENCILREFERENCESTATE";
+ case CMD_BINDDESCRIPTORSETS:
+ return "CMD_BINDDESCRIPTORSETS";
+ case CMD_BINDINDEXBUFFER:
+ return "CMD_BINDINDEXBUFFER";
+ case CMD_BINDVERTEXBUFFER:
+ return "CMD_BINDVERTEXBUFFER";
+ case CMD_DRAW:
+ return "CMD_DRAW";
+ case CMD_DRAWINDEXED:
+ return "CMD_DRAWINDEXED";
+ case CMD_DRAWINDIRECT:
+ return "CMD_DRAWINDIRECT";
+ case CMD_DRAWINDEXEDINDIRECT:
+ return "CMD_DRAWINDEXEDINDIRECT";
+ case CMD_DISPATCH:
+ return "CMD_DISPATCH";
+ case CMD_DISPATCHINDIRECT:
+ return "CMD_DISPATCHINDIRECT";
+ case CMD_COPYBUFFER:
+ return "CMD_COPYBUFFER";
+ case CMD_COPYIMAGE:
+ return "CMD_COPYIMAGE";
+ case CMD_BLITIMAGE:
+ return "CMD_BLITIMAGE";
+ case CMD_COPYBUFFERTOIMAGE:
+ return "CMD_COPYBUFFERTOIMAGE";
+ case CMD_COPYIMAGETOBUFFER:
+ return "CMD_COPYIMAGETOBUFFER";
+ case CMD_CLONEIMAGEDATA:
+ return "CMD_CLONEIMAGEDATA";
+ case CMD_UPDATEBUFFER:
+ return "CMD_UPDATEBUFFER";
+ case CMD_FILLBUFFER:
+ return "CMD_FILLBUFFER";
+ case CMD_CLEARCOLORIMAGE:
+ return "CMD_CLEARCOLORIMAGE";
+ case CMD_CLEARATTACHMENTS:
+ return "CMD_CLEARCOLORATTACHMENT";
+ case CMD_CLEARDEPTHSTENCILIMAGE:
+ return "CMD_CLEARDEPTHSTENCILIMAGE";
+ case CMD_RESOLVEIMAGE:
+ return "CMD_RESOLVEIMAGE";
+ case CMD_SETEVENT:
+ return "CMD_SETEVENT";
+ case CMD_RESETEVENT:
+ return "CMD_RESETEVENT";
+ case CMD_WAITEVENTS:
+ return "CMD_WAITEVENTS";
+ case CMD_PIPELINEBARRIER:
+ return "CMD_PIPELINEBARRIER";
+ case CMD_BEGINQUERY:
+ return "CMD_BEGINQUERY";
+ case CMD_ENDQUERY:
+ return "CMD_ENDQUERY";
+ case CMD_RESETQUERYPOOL:
+ return "CMD_RESETQUERYPOOL";
+ case CMD_COPYQUERYPOOLRESULTS:
+ return "CMD_COPYQUERYPOOLRESULTS";
+ case CMD_WRITETIMESTAMP:
+ return "CMD_WRITETIMESTAMP";
+ case CMD_INITATOMICCOUNTERS:
+ return "CMD_INITATOMICCOUNTERS";
+ case CMD_LOADATOMICCOUNTERS:
+ return "CMD_LOADATOMICCOUNTERS";
+ case CMD_SAVEATOMICCOUNTERS:
+ return "CMD_SAVEATOMICCOUNTERS";
+ case CMD_BEGINRENDERPASS:
+ return "CMD_BEGINRENDERPASS";
+ case CMD_ENDRENDERPASS:
+ return "CMD_ENDRENDERPASS";
+ default:
+ return "UNKNOWN";
}
}
@@ -955,62 +954,62 @@
static void build_def_index(shader_module *module) {
for (auto insn : *module) {
switch (insn.opcode()) {
- // Types
- case spv::OpTypeVoid:
- case spv::OpTypeBool:
- case spv::OpTypeInt:
- case spv::OpTypeFloat:
- case spv::OpTypeVector:
- case spv::OpTypeMatrix:
- case spv::OpTypeImage:
- case spv::OpTypeSampler:
- case spv::OpTypeSampledImage:
- case spv::OpTypeArray:
- case spv::OpTypeRuntimeArray:
- case spv::OpTypeStruct:
- case spv::OpTypeOpaque:
- case spv::OpTypePointer:
- case spv::OpTypeFunction:
- case spv::OpTypeEvent:
- case spv::OpTypeDeviceEvent:
- case spv::OpTypeReserveId:
- case spv::OpTypeQueue:
- case spv::OpTypePipe:
- module->def_index[insn.word(1)] = insn.offset();
- break;
+ // Types
+ case spv::OpTypeVoid:
+ case spv::OpTypeBool:
+ case spv::OpTypeInt:
+ case spv::OpTypeFloat:
+ case spv::OpTypeVector:
+ case spv::OpTypeMatrix:
+ case spv::OpTypeImage:
+ case spv::OpTypeSampler:
+ case spv::OpTypeSampledImage:
+ case spv::OpTypeArray:
+ case spv::OpTypeRuntimeArray:
+ case spv::OpTypeStruct:
+ case spv::OpTypeOpaque:
+ case spv::OpTypePointer:
+ case spv::OpTypeFunction:
+ case spv::OpTypeEvent:
+ case spv::OpTypeDeviceEvent:
+ case spv::OpTypeReserveId:
+ case spv::OpTypeQueue:
+ case spv::OpTypePipe:
+ module->def_index[insn.word(1)] = insn.offset();
+ break;
- // Fixed constants
- case spv::OpConstantTrue:
- case spv::OpConstantFalse:
- case spv::OpConstant:
- case spv::OpConstantComposite:
- case spv::OpConstantSampler:
- case spv::OpConstantNull:
- module->def_index[insn.word(2)] = insn.offset();
- break;
+ // Fixed constants
+ case spv::OpConstantTrue:
+ case spv::OpConstantFalse:
+ case spv::OpConstant:
+ case spv::OpConstantComposite:
+ case spv::OpConstantSampler:
+ case spv::OpConstantNull:
+ module->def_index[insn.word(2)] = insn.offset();
+ break;
- // Specialization constants
- case spv::OpSpecConstantTrue:
- case spv::OpSpecConstantFalse:
- case spv::OpSpecConstant:
- case spv::OpSpecConstantComposite:
- case spv::OpSpecConstantOp:
- module->def_index[insn.word(2)] = insn.offset();
- break;
+ // Specialization constants
+ case spv::OpSpecConstantTrue:
+ case spv::OpSpecConstantFalse:
+ case spv::OpSpecConstant:
+ case spv::OpSpecConstantComposite:
+ case spv::OpSpecConstantOp:
+ module->def_index[insn.word(2)] = insn.offset();
+ break;
- // Variables
- case spv::OpVariable:
- module->def_index[insn.word(2)] = insn.offset();
- break;
+ // Variables
+ case spv::OpVariable:
+ module->def_index[insn.word(2)] = insn.offset();
+ break;
- // Functions
- case spv::OpFunction:
- module->def_index[insn.word(2)] = insn.offset();
- break;
+ // Functions
+ case spv::OpFunction:
+ module->def_index[insn.word(2)] = insn.offset();
+ break;
- default:
- // We don't care about any other defs for now.
- break;
+ default:
+ // We don't care about any other defs for now.
+ break;
}
}
}
@@ -1032,32 +1031,32 @@
static char const *storage_class_name(unsigned sc) {
switch (sc) {
- case spv::StorageClassInput:
- return "input";
- case spv::StorageClassOutput:
- return "output";
- case spv::StorageClassUniformConstant:
- return "const uniform";
- case spv::StorageClassUniform:
- return "uniform";
- case spv::StorageClassWorkgroup:
- return "workgroup local";
- case spv::StorageClassCrossWorkgroup:
- return "workgroup global";
- case spv::StorageClassPrivate:
- return "private global";
- case spv::StorageClassFunction:
- return "function";
- case spv::StorageClassGeneric:
- return "generic";
- case spv::StorageClassAtomicCounter:
- return "atomic counter";
- case spv::StorageClassImage:
- return "image";
- case spv::StorageClassPushConstant:
- return "push constant";
- default:
- return "unknown";
+ case spv::StorageClassInput:
+ return "input";
+ case spv::StorageClassOutput:
+ return "output";
+ case spv::StorageClassUniformConstant:
+ return "const uniform";
+ case spv::StorageClassUniform:
+ return "uniform";
+ case spv::StorageClassWorkgroup:
+ return "workgroup local";
+ case spv::StorageClassCrossWorkgroup:
+ return "workgroup global";
+ case spv::StorageClassPrivate:
+ return "private global";
+ case spv::StorageClassFunction:
+ return "function";
+ case spv::StorageClassGeneric:
+ return "generic";
+ case spv::StorageClassAtomicCounter:
+ return "atomic counter";
+ case spv::StorageClassImage:
+ return "image";
+ case spv::StorageClassPushConstant:
+ return "push constant";
+ default:
+ return "unknown";
}
}
@@ -1080,56 +1079,56 @@
assert(insn != src->end());
switch (insn.opcode()) {
- case spv::OpTypeBool:
- ss << "bool";
- break;
- case spv::OpTypeInt:
- ss << (insn.word(3) ? 's' : 'u') << "int" << insn.word(2);
- break;
- case spv::OpTypeFloat:
- ss << "float" << insn.word(2);
- break;
- case spv::OpTypeVector:
- ss << "vec" << insn.word(3) << " of ";
- describe_type_inner(ss, src, insn.word(2));
- break;
- case spv::OpTypeMatrix:
- ss << "mat" << insn.word(3) << " of ";
- describe_type_inner(ss, src, insn.word(2));
- break;
- case spv::OpTypeArray:
- ss << "arr[" << get_constant_value(src, insn.word(3)) << "] of ";
- describe_type_inner(ss, src, insn.word(2));
- break;
- case spv::OpTypePointer:
- ss << "ptr to " << storage_class_name(insn.word(2)) << " ";
- describe_type_inner(ss, src, insn.word(3));
- break;
- case spv::OpTypeStruct: {
- ss << "struct of (";
- for (unsigned i = 2; i < insn.len(); i++) {
- describe_type_inner(ss, src, insn.word(i));
- if (i == insn.len() - 1) {
- ss << ")";
- } else {
- ss << ", ";
+ case spv::OpTypeBool:
+ ss << "bool";
+ break;
+ case spv::OpTypeInt:
+ ss << (insn.word(3) ? 's' : 'u') << "int" << insn.word(2);
+ break;
+ case spv::OpTypeFloat:
+ ss << "float" << insn.word(2);
+ break;
+ case spv::OpTypeVector:
+ ss << "vec" << insn.word(3) << " of ";
+ describe_type_inner(ss, src, insn.word(2));
+ break;
+ case spv::OpTypeMatrix:
+ ss << "mat" << insn.word(3) << " of ";
+ describe_type_inner(ss, src, insn.word(2));
+ break;
+ case spv::OpTypeArray:
+ ss << "arr[" << get_constant_value(src, insn.word(3)) << "] of ";
+ describe_type_inner(ss, src, insn.word(2));
+ break;
+ case spv::OpTypePointer:
+ ss << "ptr to " << storage_class_name(insn.word(2)) << " ";
+ describe_type_inner(ss, src, insn.word(3));
+ break;
+ case spv::OpTypeStruct: {
+ ss << "struct of (";
+ for (unsigned i = 2; i < insn.len(); i++) {
+ describe_type_inner(ss, src, insn.word(i));
+ if (i == insn.len() - 1) {
+ ss << ")";
+ } else {
+ ss << ", ";
+ }
}
+ break;
}
- break;
- }
- case spv::OpTypeSampler:
- ss << "sampler";
- break;
- case spv::OpTypeSampledImage:
- ss << "sampler+";
- describe_type_inner(ss, src, insn.word(2));
- break;
- case spv::OpTypeImage:
- ss << "image(dim=" << insn.word(3) << ", sampled=" << insn.word(7) << ")";
- break;
- default:
- ss << "oddtype";
- break;
+ case spv::OpTypeSampler:
+ ss << "sampler";
+ break;
+ case spv::OpTypeSampledImage:
+ ss << "sampler+";
+ describe_type_inner(ss, src, insn.word(2));
+ break;
+ case spv::OpTypeImage:
+ ss << "image(dim=" << insn.word(3) << ", sampled=" << insn.word(7) << ")";
+ break;
+ default:
+ ss << "oddtype";
+ break;
}
}
@@ -1140,8 +1139,7 @@
}
static bool is_narrow_numeric_type(spirv_inst_iter type) {
- if (type.opcode() != spv::OpTypeInt && type.opcode() != spv::OpTypeFloat)
- return false;
+ if (type.opcode() != spv::OpTypeInt && type.opcode() != spv::OpTypeFloat) return false;
return type.word(2) < 64;
}
@@ -1181,49 +1179,49 @@
}
switch (a_insn.opcode()) {
- case spv::OpTypeBool:
- return true;
- case spv::OpTypeInt:
- // Match on width, signedness
- return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3);
- case spv::OpTypeFloat:
- // Match on width
- return a_insn.word(2) == b_insn.word(2);
- case spv::OpTypeVector:
- // Match on element type, count.
- if (!types_match(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false))
- return false;
- if (relaxed && is_narrow_numeric_type(a->get_def(a_insn.word(2)))) {
- return a_insn.word(3) >= b_insn.word(3);
- } else {
- return a_insn.word(3) == b_insn.word(3);
- }
- case spv::OpTypeMatrix:
- // Match on element type, count.
- return types_match(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) && a_insn.word(3) == b_insn.word(3);
- case spv::OpTypeArray:
- // Match on element type, count. these all have the same layout. we don't get here if b_arrayed. This differs from
- // vector & matrix types in that the array size is the id of a constant instruction, * not a literal within OpTypeArray
- return types_match(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
- get_constant_value(a, a_insn.word(3)) == get_constant_value(b, b_insn.word(3));
- case spv::OpTypeStruct:
- // Match on all element types
- {
- if (a_insn.len() != b_insn.len()) {
- return false; // Structs cannot match if member counts differ
- }
-
- for (unsigned i = 2; i < a_insn.len(); i++) {
- if (!types_match(a, b, a_insn.word(i), b_insn.word(i), a_arrayed, b_arrayed, false)) {
- return false;
- }
- }
-
+ case spv::OpTypeBool:
return true;
- }
- default:
- // Remaining types are CLisms, or may not appear in the interfaces we are interested in. Just claim no match.
- return false;
+ case spv::OpTypeInt:
+ // Match on width, signedness
+ return a_insn.word(2) == b_insn.word(2) && a_insn.word(3) == b_insn.word(3);
+ case spv::OpTypeFloat:
+ // Match on width
+ return a_insn.word(2) == b_insn.word(2);
+ case spv::OpTypeVector:
+ // Match on element type, count.
+ if (!types_match(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false)) return false;
+ if (relaxed && is_narrow_numeric_type(a->get_def(a_insn.word(2)))) {
+ return a_insn.word(3) >= b_insn.word(3);
+ } else {
+ return a_insn.word(3) == b_insn.word(3);
+ }
+ case spv::OpTypeMatrix:
+ // Match on element type, count.
+ return types_match(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
+ a_insn.word(3) == b_insn.word(3);
+ case spv::OpTypeArray:
+ // Match on element type, count. these all have the same layout. we don't get here if b_arrayed. This differs from
+ // vector & matrix types in that the array size is the id of a constant instruction, * not a literal within OpTypeArray
+ return types_match(a, b, a_insn.word(2), b_insn.word(2), a_arrayed, b_arrayed, false) &&
+ get_constant_value(a, a_insn.word(3)) == get_constant_value(b, b_insn.word(3));
+ case spv::OpTypeStruct:
+ // Match on all element types
+ {
+ if (a_insn.len() != b_insn.len()) {
+ return false; // Structs cannot match if member counts differ
+ }
+
+ for (unsigned i = 2; i < a_insn.len(); i++) {
+ if (!types_match(a, b, a_insn.word(i), b_insn.word(i), a_arrayed, b_arrayed, false)) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+ default:
+ // Remaining types are CLisms, or may not appear in the interfaces we are interested in. Just claim no match.
+ return false;
}
}
@@ -1240,46 +1238,46 @@
assert(insn != src->end());
switch (insn.opcode()) {
- case spv::OpTypePointer:
- // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
- // pointers around.
- return get_locations_consumed_by_type(src, insn.word(3), strip_array_level);
- case spv::OpTypeArray:
- if (strip_array_level) {
- return get_locations_consumed_by_type(src, insn.word(2), false);
- } else {
- return get_constant_value(src, insn.word(3)) * get_locations_consumed_by_type(src, insn.word(2), false);
+ case spv::OpTypePointer:
+ // See through the ptr -- this is only ever at the toplevel for graphics shaders we're never actually passing
+ // pointers around.
+ return get_locations_consumed_by_type(src, insn.word(3), strip_array_level);
+ case spv::OpTypeArray:
+ if (strip_array_level) {
+ return get_locations_consumed_by_type(src, insn.word(2), false);
+ } else {
+ return get_constant_value(src, insn.word(3)) * get_locations_consumed_by_type(src, insn.word(2), false);
+ }
+ case spv::OpTypeMatrix:
+ // Num locations is the dimension * element size
+ return insn.word(3) * get_locations_consumed_by_type(src, insn.word(2), false);
+ case spv::OpTypeVector: {
+ auto scalar_type = src->get_def(insn.word(2));
+ auto bit_width =
+ (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
+
+ // Locations are 128-bit wide; 3- and 4-component vectors of 64 bit types require two.
+ return (bit_width * insn.word(3) + 127) / 128;
}
- case spv::OpTypeMatrix:
- // Num locations is the dimension * element size
- return insn.word(3) * get_locations_consumed_by_type(src, insn.word(2), false);
- case spv::OpTypeVector: {
- auto scalar_type = src->get_def(insn.word(2));
- auto bit_width =
- (scalar_type.opcode() == spv::OpTypeInt || scalar_type.opcode() == spv::OpTypeFloat) ? scalar_type.word(2) : 32;
+ default:
+ // Everything else is just 1.
+ return 1;
- // Locations are 128-bit wide; 3- and 4-component vectors of 64 bit types require two.
- return (bit_width * insn.word(3) + 127) / 128;
- }
- default:
- // Everything else is just 1.
- return 1;
-
- // TODO: extend to handle 64bit scalar types, whose vectors may need multiple locations.
+ // TODO: extend to handle 64bit scalar types, whose vectors may need multiple locations.
}
}
static unsigned get_locations_consumed_by_format(VkFormat format) {
switch (format) {
- case VK_FORMAT_R64G64B64A64_SFLOAT:
- case VK_FORMAT_R64G64B64A64_SINT:
- case VK_FORMAT_R64G64B64A64_UINT:
- case VK_FORMAT_R64G64B64_SFLOAT:
- case VK_FORMAT_R64G64B64_SINT:
- case VK_FORMAT_R64G64B64_UINT:
- return 2;
- default:
- return 1;
+ case VK_FORMAT_R64G64B64A64_SFLOAT:
+ case VK_FORMAT_R64G64B64A64_SINT:
+ case VK_FORMAT_R64G64B64A64_UINT:
+ case VK_FORMAT_R64G64B64_SFLOAT:
+ case VK_FORMAT_R64G64B64_SINT:
+ case VK_FORMAT_R64G64B64_UINT:
+ return 2;
+ default:
+ return 1;
}
}
@@ -1309,7 +1307,6 @@
static spirv_inst_iter get_struct_type(shader_module const *src, spirv_inst_iter def, bool is_array_of_verts) {
while (true) {
-
if (def.opcode() == spv::OpTypePointer) {
def = src->get_def(def.word(3));
} else if (def.opcode() == spv::OpTypeArray && is_array_of_verts) {
@@ -1383,7 +1380,6 @@
static std::map<location_t, interface_var> collect_interface_by_location(shader_module const *src, spirv_inst_iter entrypoint,
spv::StorageClass sinterface, bool is_array_of_verts) {
-
std::unordered_map<unsigned, unsigned> var_locations;
std::unordered_map<unsigned, unsigned> var_builtins;
std::unordered_map<unsigned, unsigned> var_components;
@@ -1392,7 +1388,6 @@
std::unordered_map<unsigned, unsigned> var_relaxed_precision;
for (auto insn : *src) {
-
// We consider two interface models: SSO rendezvous-by-location, and builtins. Complain about anything that
// fits neither model.
if (insn.opcode() == spv::OpDecorate) {
@@ -1446,7 +1441,7 @@
int location = value_or_default(var_locations, id, -1);
int builtin = value_or_default(var_builtins, id, -1);
- unsigned component = value_or_default(var_components, id, 0); // Unspecified is OK, is 0
+ unsigned component = value_or_default(var_components, id, 0); // Unspecified is OK, is 0
bool is_patch = var_patch.find(id) != var_patch.end();
bool is_relaxed_precision = var_relaxed_precision.find(id) != var_relaxed_precision.end();
@@ -1480,10 +1475,8 @@
return out;
}
-static std::vector<std::pair<uint32_t, interface_var>>
-collect_interface_by_input_attachment_index(debug_report_data *report_data, shader_module const *src,
- std::unordered_set<uint32_t> const &accessible_ids) {
-
+static std::vector<std::pair<uint32_t, interface_var>> collect_interface_by_input_attachment_index(
+ debug_report_data *report_data, shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids) {
std::vector<std::pair<uint32_t, interface_var>> out;
for (auto insn : *src) {
@@ -1514,10 +1507,8 @@
return out;
}
-static std::vector<std::pair<descriptor_slot_t, interface_var>>
-collect_interface_by_descriptor_slot(debug_report_data *report_data, shader_module const *src,
- std::unordered_set<uint32_t> const &accessible_ids) {
-
+static std::vector<std::pair<descriptor_slot_t, interface_var>> collect_interface_by_descriptor_slot(
+ debug_report_data *report_data, shader_module const *src, std::unordered_set<uint32_t> const &accessible_ids) {
std::unordered_map<unsigned, unsigned> var_sets;
std::unordered_map<unsigned, unsigned> var_bindings;
@@ -1634,61 +1625,61 @@
enum FORMAT_TYPE {
FORMAT_TYPE_UNDEFINED,
- FORMAT_TYPE_FLOAT, // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
+ FORMAT_TYPE_FLOAT, // UNORM, SNORM, FLOAT, USCALED, SSCALED, SRGB -- anything we consider float in the shader
FORMAT_TYPE_SINT,
FORMAT_TYPE_UINT,
};
static unsigned get_format_type(VkFormat fmt) {
switch (fmt) {
- case VK_FORMAT_UNDEFINED:
- return FORMAT_TYPE_UNDEFINED;
- case VK_FORMAT_R8_SINT:
- case VK_FORMAT_R8G8_SINT:
- case VK_FORMAT_R8G8B8_SINT:
- case VK_FORMAT_R8G8B8A8_SINT:
- case VK_FORMAT_R16_SINT:
- case VK_FORMAT_R16G16_SINT:
- case VK_FORMAT_R16G16B16_SINT:
- case VK_FORMAT_R16G16B16A16_SINT:
- case VK_FORMAT_R32_SINT:
- case VK_FORMAT_R32G32_SINT:
- case VK_FORMAT_R32G32B32_SINT:
- case VK_FORMAT_R32G32B32A32_SINT:
- case VK_FORMAT_R64_SINT:
- case VK_FORMAT_R64G64_SINT:
- case VK_FORMAT_R64G64B64_SINT:
- case VK_FORMAT_R64G64B64A64_SINT:
- case VK_FORMAT_B8G8R8_SINT:
- case VK_FORMAT_B8G8R8A8_SINT:
- case VK_FORMAT_A8B8G8R8_SINT_PACK32:
- case VK_FORMAT_A2B10G10R10_SINT_PACK32:
- case VK_FORMAT_A2R10G10B10_SINT_PACK32:
- return FORMAT_TYPE_SINT;
- case VK_FORMAT_R8_UINT:
- case VK_FORMAT_R8G8_UINT:
- case VK_FORMAT_R8G8B8_UINT:
- case VK_FORMAT_R8G8B8A8_UINT:
- case VK_FORMAT_R16_UINT:
- case VK_FORMAT_R16G16_UINT:
- case VK_FORMAT_R16G16B16_UINT:
- case VK_FORMAT_R16G16B16A16_UINT:
- case VK_FORMAT_R32_UINT:
- case VK_FORMAT_R32G32_UINT:
- case VK_FORMAT_R32G32B32_UINT:
- case VK_FORMAT_R32G32B32A32_UINT:
- case VK_FORMAT_R64_UINT:
- case VK_FORMAT_R64G64_UINT:
- case VK_FORMAT_R64G64B64_UINT:
- case VK_FORMAT_R64G64B64A64_UINT:
- case VK_FORMAT_B8G8R8_UINT:
- case VK_FORMAT_B8G8R8A8_UINT:
- case VK_FORMAT_A8B8G8R8_UINT_PACK32:
- case VK_FORMAT_A2B10G10R10_UINT_PACK32:
- case VK_FORMAT_A2R10G10B10_UINT_PACK32:
- return FORMAT_TYPE_UINT;
- default:
- return FORMAT_TYPE_FLOAT;
+ case VK_FORMAT_UNDEFINED:
+ return FORMAT_TYPE_UNDEFINED;
+ case VK_FORMAT_R8_SINT:
+ case VK_FORMAT_R8G8_SINT:
+ case VK_FORMAT_R8G8B8_SINT:
+ case VK_FORMAT_R8G8B8A8_SINT:
+ case VK_FORMAT_R16_SINT:
+ case VK_FORMAT_R16G16_SINT:
+ case VK_FORMAT_R16G16B16_SINT:
+ case VK_FORMAT_R16G16B16A16_SINT:
+ case VK_FORMAT_R32_SINT:
+ case VK_FORMAT_R32G32_SINT:
+ case VK_FORMAT_R32G32B32_SINT:
+ case VK_FORMAT_R32G32B32A32_SINT:
+ case VK_FORMAT_R64_SINT:
+ case VK_FORMAT_R64G64_SINT:
+ case VK_FORMAT_R64G64B64_SINT:
+ case VK_FORMAT_R64G64B64A64_SINT:
+ case VK_FORMAT_B8G8R8_SINT:
+ case VK_FORMAT_B8G8R8A8_SINT:
+ case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+ case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+ return FORMAT_TYPE_SINT;
+ case VK_FORMAT_R8_UINT:
+ case VK_FORMAT_R8G8_UINT:
+ case VK_FORMAT_R8G8B8_UINT:
+ case VK_FORMAT_R8G8B8A8_UINT:
+ case VK_FORMAT_R16_UINT:
+ case VK_FORMAT_R16G16_UINT:
+ case VK_FORMAT_R16G16B16_UINT:
+ case VK_FORMAT_R16G16B16A16_UINT:
+ case VK_FORMAT_R32_UINT:
+ case VK_FORMAT_R32G32_UINT:
+ case VK_FORMAT_R32G32B32_UINT:
+ case VK_FORMAT_R32G32B32A32_UINT:
+ case VK_FORMAT_R64_UINT:
+ case VK_FORMAT_R64G64_UINT:
+ case VK_FORMAT_R64G64B64_UINT:
+ case VK_FORMAT_R64G64B64A64_UINT:
+ case VK_FORMAT_B8G8R8_UINT:
+ case VK_FORMAT_B8G8R8A8_UINT:
+ case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+ case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+ return FORMAT_TYPE_UINT;
+ default:
+ return FORMAT_TYPE_FLOAT;
}
}
@@ -1698,23 +1689,23 @@
assert(insn != src->end());
switch (insn.opcode()) {
- case spv::OpTypeInt:
- return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
- case spv::OpTypeFloat:
- return FORMAT_TYPE_FLOAT;
- case spv::OpTypeVector:
- return get_fundamental_type(src, insn.word(2));
- case spv::OpTypeMatrix:
- return get_fundamental_type(src, insn.word(2));
- case spv::OpTypeArray:
- return get_fundamental_type(src, insn.word(2));
- case spv::OpTypePointer:
- return get_fundamental_type(src, insn.word(3));
- case spv::OpTypeImage:
- return get_fundamental_type(src, insn.word(2));
+ case spv::OpTypeInt:
+ return insn.word(3) ? FORMAT_TYPE_SINT : FORMAT_TYPE_UINT;
+ case spv::OpTypeFloat:
+ return FORMAT_TYPE_FLOAT;
+ case spv::OpTypeVector:
+ return get_fundamental_type(src, insn.word(2));
+ case spv::OpTypeMatrix:
+ return get_fundamental_type(src, insn.word(2));
+ case spv::OpTypeArray:
+ return get_fundamental_type(src, insn.word(2));
+ case spv::OpTypePointer:
+ return get_fundamental_type(src, insn.word(3));
+ case spv::OpTypeImage:
+ return get_fundamental_type(src, insn.word(2));
- default:
- return FORMAT_TYPE_UNDEFINED;
+ default:
+ return FORMAT_TYPE_UNDEFINED;
}
}
@@ -1818,8 +1809,7 @@
auto subpass = rpci->pSubpasses[subpass_index];
for (auto i = 0u; i < subpass.colorAttachmentCount; ++i) {
uint32_t attachment = subpass.pColorAttachments[i].attachment;
- if (attachment == VK_ATTACHMENT_UNUSED)
- continue;
+ if (attachment == VK_ATTACHMENT_UNUSED) continue;
if (rpci->pAttachments[attachment].format != VK_FORMAT_UNDEFINED) {
color_attachments[i] = rpci->pAttachments[attachment].format;
}
@@ -1902,92 +1892,92 @@
// Try to add to the output set
if (!ids.insert(id).second) {
- continue; // If we already saw this id, we don't want to walk it again.
+ continue; // If we already saw this id, we don't want to walk it again.
}
switch (insn.opcode()) {
- case spv::OpFunction:
- // Scan whole body of the function, enlisting anything interesting
- while (++insn, insn.opcode() != spv::OpFunctionEnd) {
- switch (insn.opcode()) {
- case spv::OpLoad:
- case spv::OpAtomicLoad:
- case spv::OpAtomicExchange:
- case spv::OpAtomicCompareExchange:
- case spv::OpAtomicCompareExchangeWeak:
- case spv::OpAtomicIIncrement:
- case spv::OpAtomicIDecrement:
- case spv::OpAtomicIAdd:
- case spv::OpAtomicISub:
- case spv::OpAtomicSMin:
- case spv::OpAtomicUMin:
- case spv::OpAtomicSMax:
- case spv::OpAtomicUMax:
- case spv::OpAtomicAnd:
- case spv::OpAtomicOr:
- case spv::OpAtomicXor:
- worklist.insert(insn.word(3)); // ptr
- break;
- case spv::OpStore:
- case spv::OpAtomicStore:
- worklist.insert(insn.word(1)); // ptr
- break;
- case spv::OpAccessChain:
- case spv::OpInBoundsAccessChain:
- worklist.insert(insn.word(3)); // base ptr
- break;
- case spv::OpSampledImage:
- case spv::OpImageSampleImplicitLod:
- case spv::OpImageSampleExplicitLod:
- case spv::OpImageSampleDrefImplicitLod:
- case spv::OpImageSampleDrefExplicitLod:
- case spv::OpImageSampleProjImplicitLod:
- case spv::OpImageSampleProjExplicitLod:
- case spv::OpImageSampleProjDrefImplicitLod:
- case spv::OpImageSampleProjDrefExplicitLod:
- case spv::OpImageFetch:
- case spv::OpImageGather:
- case spv::OpImageDrefGather:
- case spv::OpImageRead:
- case spv::OpImage:
- case spv::OpImageQueryFormat:
- case spv::OpImageQueryOrder:
- case spv::OpImageQuerySizeLod:
- case spv::OpImageQuerySize:
- case spv::OpImageQueryLod:
- case spv::OpImageQueryLevels:
- case spv::OpImageQuerySamples:
- case spv::OpImageSparseSampleImplicitLod:
- case spv::OpImageSparseSampleExplicitLod:
- case spv::OpImageSparseSampleDrefImplicitLod:
- case spv::OpImageSparseSampleDrefExplicitLod:
- case spv::OpImageSparseSampleProjImplicitLod:
- case spv::OpImageSparseSampleProjExplicitLod:
- case spv::OpImageSparseSampleProjDrefImplicitLod:
- case spv::OpImageSparseSampleProjDrefExplicitLod:
- case spv::OpImageSparseFetch:
- case spv::OpImageSparseGather:
- case spv::OpImageSparseDrefGather:
- case spv::OpImageTexelPointer:
- worklist.insert(insn.word(3)); // Image or sampled image
- break;
- case spv::OpImageWrite:
- worklist.insert(insn.word(1)); // Image -- different operand order to above
- break;
- case spv::OpFunctionCall:
- for (uint32_t i = 3; i < insn.len(); i++) {
- worklist.insert(insn.word(i)); // fn itself, and all args
- }
- break;
+ case spv::OpFunction:
+ // Scan whole body of the function, enlisting anything interesting
+ while (++insn, insn.opcode() != spv::OpFunctionEnd) {
+ switch (insn.opcode()) {
+ case spv::OpLoad:
+ case spv::OpAtomicLoad:
+ case spv::OpAtomicExchange:
+ case spv::OpAtomicCompareExchange:
+ case spv::OpAtomicCompareExchangeWeak:
+ case spv::OpAtomicIIncrement:
+ case spv::OpAtomicIDecrement:
+ case spv::OpAtomicIAdd:
+ case spv::OpAtomicISub:
+ case spv::OpAtomicSMin:
+ case spv::OpAtomicUMin:
+ case spv::OpAtomicSMax:
+ case spv::OpAtomicUMax:
+ case spv::OpAtomicAnd:
+ case spv::OpAtomicOr:
+ case spv::OpAtomicXor:
+ worklist.insert(insn.word(3)); // ptr
+ break;
+ case spv::OpStore:
+ case spv::OpAtomicStore:
+ worklist.insert(insn.word(1)); // ptr
+ break;
+ case spv::OpAccessChain:
+ case spv::OpInBoundsAccessChain:
+ worklist.insert(insn.word(3)); // base ptr
+ break;
+ case spv::OpSampledImage:
+ case spv::OpImageSampleImplicitLod:
+ case spv::OpImageSampleExplicitLod:
+ case spv::OpImageSampleDrefImplicitLod:
+ case spv::OpImageSampleDrefExplicitLod:
+ case spv::OpImageSampleProjImplicitLod:
+ case spv::OpImageSampleProjExplicitLod:
+ case spv::OpImageSampleProjDrefImplicitLod:
+ case spv::OpImageSampleProjDrefExplicitLod:
+ case spv::OpImageFetch:
+ case spv::OpImageGather:
+ case spv::OpImageDrefGather:
+ case spv::OpImageRead:
+ case spv::OpImage:
+ case spv::OpImageQueryFormat:
+ case spv::OpImageQueryOrder:
+ case spv::OpImageQuerySizeLod:
+ case spv::OpImageQuerySize:
+ case spv::OpImageQueryLod:
+ case spv::OpImageQueryLevels:
+ case spv::OpImageQuerySamples:
+ case spv::OpImageSparseSampleImplicitLod:
+ case spv::OpImageSparseSampleExplicitLod:
+ case spv::OpImageSparseSampleDrefImplicitLod:
+ case spv::OpImageSparseSampleDrefExplicitLod:
+ case spv::OpImageSparseSampleProjImplicitLod:
+ case spv::OpImageSparseSampleProjExplicitLod:
+ case spv::OpImageSparseSampleProjDrefImplicitLod:
+ case spv::OpImageSparseSampleProjDrefExplicitLod:
+ case spv::OpImageSparseFetch:
+ case spv::OpImageSparseGather:
+ case spv::OpImageSparseDrefGather:
+ case spv::OpImageTexelPointer:
+ worklist.insert(insn.word(3)); // Image or sampled image
+ break;
+ case spv::OpImageWrite:
+ worklist.insert(insn.word(1)); // Image -- different operand order to above
+ break;
+ case spv::OpFunctionCall:
+ for (uint32_t i = 3; i < insn.len(); i++) {
+ worklist.insert(insn.word(i)); // fn itself, and all args
+ }
+ break;
- case spv::OpExtInst:
- for (uint32_t i = 5; i < insn.len(); i++) {
- worklist.insert(insn.word(i)); // Operands to ext inst
+ case spv::OpExtInst:
+ for (uint32_t i = 5; i < insn.len(); i++) {
+ worklist.insert(insn.word(i)); // Operands to ext inst
+ }
+ break;
}
- break;
}
- }
- break;
+ break;
}
}
@@ -2008,10 +1998,9 @@
// TODO: arrays, matrices, weird sizes
for (auto insn : *src) {
if (insn.opcode() == spv::OpMemberDecorate && insn.word(1) == type.word(1)) {
-
if (insn.word(3) == spv::DecorationOffset) {
unsigned offset = insn.word(4);
- auto size = 4; // Bytes; TODO: calculate this based on the type
+ auto size = 4; // Bytes; TODO: calculate this based on the type
bool found_range = false;
for (auto const &range : *push_constant_ranges) {
@@ -2068,12 +2057,9 @@
// has the requested binding at slot.second and return ptr to that binding
static VkDescriptorSetLayoutBinding const *get_descriptor_binding(PIPELINE_LAYOUT_NODE const *pipelineLayout,
descriptor_slot_t slot) {
+ if (!pipelineLayout) return nullptr;
- if (!pipelineLayout)
- return nullptr;
-
- if (slot.first >= pipelineLayout->set_layouts.size())
- return nullptr;
+ if (slot.first >= pipelineLayout->set_layouts.size()) return nullptr;
return pipelineLayout->set_layouts[slot.first]->GetDescriptorSetLayoutBindingPtrFromBinding(slot.second);
}
@@ -2087,8 +2073,7 @@
static bool hasDrawCmd(GLOBAL_CB_NODE *pCB) {
for (uint32_t i = 0; i < NUM_DRAW_TYPES; i++) {
- if (pCB->drawCount[i])
- return true;
+ if (pCB->drawCount[i]) return true;
}
return false;
}
@@ -2150,8 +2135,7 @@
static bool isDynamic(const PIPELINE_STATE *pPipeline, const VkDynamicState state) {
if (pPipeline && pPipeline->graphicsPipelineCI.pDynamicState) {
for (uint32_t i = 0; i < pPipeline->graphicsPipelineCI.pDynamicState->dynamicStateCount; i++) {
- if (state == pPipeline->graphicsPipelineCI.pDynamicState->pDynamicStates[i])
- return true;
+ if (state == pPipeline->graphicsPipelineCI.pDynamicState->pDynamicStates[i]) return true;
}
}
return false;
@@ -2216,13 +2200,11 @@
} else if (pSecondary == nullptr) {
return false;
}
- if (index >= primaryCount) { // Check secondary as if primary is VK_ATTACHMENT_UNUSED
- if (VK_ATTACHMENT_UNUSED == pSecondary[index].attachment)
- return true;
- } else if (index >= secondaryCount) { // Check primary as if secondary is VK_ATTACHMENT_UNUSED
- if (VK_ATTACHMENT_UNUSED == pPrimary[index].attachment)
- return true;
- } else { // Format and sample count must match
+ if (index >= primaryCount) { // Check secondary as if primary is VK_ATTACHMENT_UNUSED
+ if (VK_ATTACHMENT_UNUSED == pSecondary[index].attachment) return true;
+ } else if (index >= secondaryCount) { // Check primary as if secondary is VK_ATTACHMENT_UNUSED
+ if (VK_ATTACHMENT_UNUSED == pPrimary[index].attachment) return true;
+ } else { // Format and sample count must match
if ((pPrimary[index].attachment == VK_ATTACHMENT_UNUSED) && (pSecondary[index].attachment == VK_ATTACHMENT_UNUSED)) {
return true;
} else if ((pPrimary[index].attachment == VK_ATTACHMENT_UNUSED) || (pSecondary[index].attachment == VK_ATTACHMENT_UNUSED)) {
@@ -2335,7 +2317,6 @@
i, spec->pMapEntries[i].constantID, spec->pMapEntries[i].offset,
spec->pMapEntries[i].offset + spec->pMapEntries[i].size - 1, spec->dataSize,
validation_error_map[VALIDATION_ERROR_00590])) {
-
pass = false;
}
}
@@ -2362,70 +2343,71 @@
}
switch (type.opcode()) {
- case spv::OpTypeStruct: {
- for (auto insn : *module) {
- if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
- if (insn.word(2) == spv::DecorationBlock) {
- return descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
- descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
- } else if (insn.word(2) == spv::DecorationBufferBlock) {
- return descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
- descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+ case spv::OpTypeStruct: {
+ for (auto insn : *module) {
+ if (insn.opcode() == spv::OpDecorate && insn.word(1) == type.word(1)) {
+ if (insn.word(2) == spv::DecorationBlock) {
+ return descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
+ descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ } else if (insn.word(2) == spv::DecorationBufferBlock) {
+ return descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
+ descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+ }
}
}
+
+ // Invalid
+ return false;
}
- // Invalid
- return false;
- }
+ case spv::OpTypeSampler:
+ return descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLER || descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- case spv::OpTypeSampler:
- return descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLER || descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
-
- case spv::OpTypeSampledImage:
- if (descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
- // Slight relaxation for some GLSL historical madness: samplerBuffer doesn't really have a sampler, and a texel
- // buffer descriptor doesn't really provide one. Allow this slight mismatch.
- auto image_type = module->get_def(type.word(2));
- auto dim = image_type.word(3);
- auto sampled = image_type.word(7);
- return dim == spv::DimBuffer && sampled == 1;
- }
- return descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
-
- case spv::OpTypeImage: {
- // Many descriptor types backing image types-- depends on dimension and whether the image will be used with a sampler.
- // SPIRV for Vulkan requires that sampled be 1 or 2 -- leaving the decision to runtime is unacceptable.
- auto dim = type.word(3);
- auto sampled = type.word(7);
-
- if (dim == spv::DimSubpassData) {
- return descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
- } else if (dim == spv::DimBuffer) {
- if (sampled == 1) {
- return descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
- } else {
- return descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ case spv::OpTypeSampledImage:
+ if (descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) {
+ // Slight relaxation for some GLSL historical madness: samplerBuffer doesn't really have a sampler, and a texel
+ // buffer descriptor doesn't really provide one. Allow this slight mismatch.
+ auto image_type = module->get_def(type.word(2));
+ auto dim = image_type.word(3);
+ auto sampled = image_type.word(7);
+ return dim == spv::DimBuffer && sampled == 1;
}
- } else if (sampled == 1) {
- return descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
- descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- } else {
- return descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
- }
- }
+ return descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
- // We shouldn't really see any other junk types -- but if we do, they're a mismatch.
- default:
- return false; // Mismatch
+ case spv::OpTypeImage: {
+ // Many descriptor types backing image types-- depends on dimension and whether the image will be used with a sampler.
+ // SPIRV for Vulkan requires that sampled be 1 or 2 -- leaving the decision to runtime is unacceptable.
+ auto dim = type.word(3);
+ auto sampled = type.word(7);
+
+ if (dim == spv::DimSubpassData) {
+ return descriptor_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+ } else if (dim == spv::DimBuffer) {
+ if (sampled == 1) {
+ return descriptor_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+ } else {
+ return descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
+ }
+ } else if (sampled == 1) {
+ return descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
+ descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+ } else {
+ return descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+ }
+ }
+
+ // We shouldn't really see any other junk types -- but if we do, they're a mismatch.
+ default:
+ return false; // Mismatch
}
}
static bool require_feature(debug_report_data *report_data, VkBool32 feature, char const *feature_name) {
if (!feature) {
if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__,
- SHADER_CHECKER_FEATURE_NOT_ENABLED, "SC", "Shader requires VkPhysicalDeviceFeatures::%s but is not "
- "enabled on the device",
+ SHADER_CHECKER_FEATURE_NOT_ENABLED, "SC",
+ "Shader requires VkPhysicalDeviceFeatures::%s but is not "
+ "enabled on the device",
feature_name)) {
return false;
}
@@ -2441,131 +2423,131 @@
for (auto insn : *src) {
if (insn.opcode() == spv::OpCapability) {
switch (insn.word(1)) {
- case spv::CapabilityMatrix:
- case spv::CapabilityShader:
- case spv::CapabilityInputAttachment:
- case spv::CapabilitySampled1D:
- case spv::CapabilityImage1D:
- case spv::CapabilitySampledBuffer:
- case spv::CapabilityImageBuffer:
- case spv::CapabilityImageQuery:
- case spv::CapabilityDerivativeControl:
- // Always supported by a Vulkan 1.0 implementation -- no feature bits.
- break;
+ case spv::CapabilityMatrix:
+ case spv::CapabilityShader:
+ case spv::CapabilityInputAttachment:
+ case spv::CapabilitySampled1D:
+ case spv::CapabilityImage1D:
+ case spv::CapabilitySampledBuffer:
+ case spv::CapabilityImageBuffer:
+ case spv::CapabilityImageQuery:
+ case spv::CapabilityDerivativeControl:
+ // Always supported by a Vulkan 1.0 implementation -- no feature bits.
+ break;
- case spv::CapabilityGeometry:
- pass &= require_feature(report_data, enabledFeatures->geometryShader, "geometryShader");
- break;
+ case spv::CapabilityGeometry:
+ pass &= require_feature(report_data, enabledFeatures->geometryShader, "geometryShader");
+ break;
- case spv::CapabilityTessellation:
- pass &= require_feature(report_data, enabledFeatures->tessellationShader, "tessellationShader");
- break;
+ case spv::CapabilityTessellation:
+ pass &= require_feature(report_data, enabledFeatures->tessellationShader, "tessellationShader");
+ break;
- case spv::CapabilityFloat64:
- pass &= require_feature(report_data, enabledFeatures->shaderFloat64, "shaderFloat64");
- break;
+ case spv::CapabilityFloat64:
+ pass &= require_feature(report_data, enabledFeatures->shaderFloat64, "shaderFloat64");
+ break;
- case spv::CapabilityInt64:
- pass &= require_feature(report_data, enabledFeatures->shaderInt64, "shaderInt64");
- break;
+ case spv::CapabilityInt64:
+ pass &= require_feature(report_data, enabledFeatures->shaderInt64, "shaderInt64");
+ break;
- case spv::CapabilityTessellationPointSize:
- case spv::CapabilityGeometryPointSize:
- pass &= require_feature(report_data, enabledFeatures->shaderTessellationAndGeometryPointSize,
- "shaderTessellationAndGeometryPointSize");
- break;
+ case spv::CapabilityTessellationPointSize:
+ case spv::CapabilityGeometryPointSize:
+ pass &= require_feature(report_data, enabledFeatures->shaderTessellationAndGeometryPointSize,
+ "shaderTessellationAndGeometryPointSize");
+ break;
- case spv::CapabilityImageGatherExtended:
- pass &= require_feature(report_data, enabledFeatures->shaderImageGatherExtended, "shaderImageGatherExtended");
- break;
+ case spv::CapabilityImageGatherExtended:
+ pass &= require_feature(report_data, enabledFeatures->shaderImageGatherExtended, "shaderImageGatherExtended");
+ break;
- case spv::CapabilityStorageImageMultisample:
- pass &=
- require_feature(report_data, enabledFeatures->shaderStorageImageMultisample, "shaderStorageImageMultisample");
- break;
+ case spv::CapabilityStorageImageMultisample:
+ pass &= require_feature(report_data, enabledFeatures->shaderStorageImageMultisample,
+ "shaderStorageImageMultisample");
+ break;
- case spv::CapabilityUniformBufferArrayDynamicIndexing:
- pass &= require_feature(report_data, enabledFeatures->shaderUniformBufferArrayDynamicIndexing,
- "shaderUniformBufferArrayDynamicIndexing");
- break;
+ case spv::CapabilityUniformBufferArrayDynamicIndexing:
+ pass &= require_feature(report_data, enabledFeatures->shaderUniformBufferArrayDynamicIndexing,
+ "shaderUniformBufferArrayDynamicIndexing");
+ break;
- case spv::CapabilitySampledImageArrayDynamicIndexing:
- pass &= require_feature(report_data, enabledFeatures->shaderSampledImageArrayDynamicIndexing,
- "shaderSampledImageArrayDynamicIndexing");
- break;
+ case spv::CapabilitySampledImageArrayDynamicIndexing:
+ pass &= require_feature(report_data, enabledFeatures->shaderSampledImageArrayDynamicIndexing,
+ "shaderSampledImageArrayDynamicIndexing");
+ break;
- case spv::CapabilityStorageBufferArrayDynamicIndexing:
- pass &= require_feature(report_data, enabledFeatures->shaderStorageBufferArrayDynamicIndexing,
- "shaderStorageBufferArrayDynamicIndexing");
- break;
+ case spv::CapabilityStorageBufferArrayDynamicIndexing:
+ pass &= require_feature(report_data, enabledFeatures->shaderStorageBufferArrayDynamicIndexing,
+ "shaderStorageBufferArrayDynamicIndexing");
+ break;
- case spv::CapabilityStorageImageArrayDynamicIndexing:
- pass &= require_feature(report_data, enabledFeatures->shaderStorageImageArrayDynamicIndexing,
- "shaderStorageImageArrayDynamicIndexing");
- break;
+ case spv::CapabilityStorageImageArrayDynamicIndexing:
+ pass &= require_feature(report_data, enabledFeatures->shaderStorageImageArrayDynamicIndexing,
+ "shaderStorageImageArrayDynamicIndexing");
+ break;
- case spv::CapabilityClipDistance:
- pass &= require_feature(report_data, enabledFeatures->shaderClipDistance, "shaderClipDistance");
- break;
+ case spv::CapabilityClipDistance:
+ pass &= require_feature(report_data, enabledFeatures->shaderClipDistance, "shaderClipDistance");
+ break;
- case spv::CapabilityCullDistance:
- pass &= require_feature(report_data, enabledFeatures->shaderCullDistance, "shaderCullDistance");
- break;
+ case spv::CapabilityCullDistance:
+ pass &= require_feature(report_data, enabledFeatures->shaderCullDistance, "shaderCullDistance");
+ break;
- case spv::CapabilityImageCubeArray:
- pass &= require_feature(report_data, enabledFeatures->imageCubeArray, "imageCubeArray");
- break;
+ case spv::CapabilityImageCubeArray:
+ pass &= require_feature(report_data, enabledFeatures->imageCubeArray, "imageCubeArray");
+ break;
- case spv::CapabilitySampleRateShading:
- pass &= require_feature(report_data, enabledFeatures->sampleRateShading, "sampleRateShading");
- break;
+ case spv::CapabilitySampleRateShading:
+ pass &= require_feature(report_data, enabledFeatures->sampleRateShading, "sampleRateShading");
+ break;
- case spv::CapabilitySparseResidency:
- pass &= require_feature(report_data, enabledFeatures->shaderResourceResidency, "shaderResourceResidency");
- break;
+ case spv::CapabilitySparseResidency:
+ pass &= require_feature(report_data, enabledFeatures->shaderResourceResidency, "shaderResourceResidency");
+ break;
- case spv::CapabilityMinLod:
- pass &= require_feature(report_data, enabledFeatures->shaderResourceMinLod, "shaderResourceMinLod");
- break;
+ case spv::CapabilityMinLod:
+ pass &= require_feature(report_data, enabledFeatures->shaderResourceMinLod, "shaderResourceMinLod");
+ break;
- case spv::CapabilitySampledCubeArray:
- pass &= require_feature(report_data, enabledFeatures->imageCubeArray, "imageCubeArray");
- break;
+ case spv::CapabilitySampledCubeArray:
+ pass &= require_feature(report_data, enabledFeatures->imageCubeArray, "imageCubeArray");
+ break;
- case spv::CapabilityImageMSArray:
- pass &=
- require_feature(report_data, enabledFeatures->shaderStorageImageMultisample, "shaderStorageImageMultisample");
- break;
+ case spv::CapabilityImageMSArray:
+ pass &= require_feature(report_data, enabledFeatures->shaderStorageImageMultisample,
+ "shaderStorageImageMultisample");
+ break;
- case spv::CapabilityStorageImageExtendedFormats:
- pass &= require_feature(report_data, enabledFeatures->shaderStorageImageExtendedFormats,
- "shaderStorageImageExtendedFormats");
- break;
+ case spv::CapabilityStorageImageExtendedFormats:
+ pass &= require_feature(report_data, enabledFeatures->shaderStorageImageExtendedFormats,
+ "shaderStorageImageExtendedFormats");
+ break;
- case spv::CapabilityInterpolationFunction:
- pass &= require_feature(report_data, enabledFeatures->sampleRateShading, "sampleRateShading");
- break;
+ case spv::CapabilityInterpolationFunction:
+ pass &= require_feature(report_data, enabledFeatures->sampleRateShading, "sampleRateShading");
+ break;
- case spv::CapabilityStorageImageReadWithoutFormat:
- pass &= require_feature(report_data, enabledFeatures->shaderStorageImageReadWithoutFormat,
- "shaderStorageImageReadWithoutFormat");
- break;
+ case spv::CapabilityStorageImageReadWithoutFormat:
+ pass &= require_feature(report_data, enabledFeatures->shaderStorageImageReadWithoutFormat,
+ "shaderStorageImageReadWithoutFormat");
+ break;
- case spv::CapabilityStorageImageWriteWithoutFormat:
- pass &= require_feature(report_data, enabledFeatures->shaderStorageImageWriteWithoutFormat,
- "shaderStorageImageWriteWithoutFormat");
- break;
+ case spv::CapabilityStorageImageWriteWithoutFormat:
+ pass &= require_feature(report_data, enabledFeatures->shaderStorageImageWriteWithoutFormat,
+ "shaderStorageImageWriteWithoutFormat");
+ break;
- case spv::CapabilityMultiViewport:
- pass &= require_feature(report_data, enabledFeatures->multiViewport, "multiViewport");
- break;
+ case spv::CapabilityMultiViewport:
+ pass &= require_feature(report_data, enabledFeatures->multiViewport, "multiViewport");
+ break;
- default:
- if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__,
- SHADER_CHECKER_BAD_CAPABILITY, "SC", "Shader declares capability %u, not supported in Vulkan.",
- insn.word(1)))
- pass = false;
- break;
+ default:
+ if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__,
+ SHADER_CHECKER_BAD_CAPABILITY, "SC", "Shader declares capability %u, not supported in Vulkan.",
+ insn.word(1)))
+ pass = false;
+ break;
}
}
}
@@ -2578,45 +2560,44 @@
while (true) {
switch (type.opcode()) {
- case spv::OpTypeArray:
- case spv::OpTypeSampledImage:
- type = module->get_def(type.word(2));
- break;
- case spv::OpTypePointer:
- type = module->get_def(type.word(3));
- break;
- case spv::OpTypeImage: {
- auto dim = type.word(3);
- auto arrayed = type.word(5);
- auto msaa = type.word(6);
+ case spv::OpTypeArray:
+ case spv::OpTypeSampledImage:
+ type = module->get_def(type.word(2));
+ break;
+ case spv::OpTypePointer:
+ type = module->get_def(type.word(3));
+ break;
+ case spv::OpTypeImage: {
+ auto dim = type.word(3);
+ auto arrayed = type.word(5);
+ auto msaa = type.word(6);
- switch (dim) {
- case spv::Dim1D:
- return arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_1D;
- case spv::Dim2D:
- return (msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE) |
- (arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_2D);
- case spv::Dim3D:
- return DESCRIPTOR_REQ_VIEW_TYPE_3D;
- case spv::DimCube:
- return arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_CUBE;
- case spv::DimSubpassData:
- return msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
- default: // buffer, etc.
- return 0;
+ switch (dim) {
+ case spv::Dim1D:
+ return arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_1D;
+ case spv::Dim2D:
+ return (msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE) |
+ (arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_2D);
+ case spv::Dim3D:
+ return DESCRIPTOR_REQ_VIEW_TYPE_3D;
+ case spv::DimCube:
+ return arrayed ? DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY : DESCRIPTOR_REQ_VIEW_TYPE_CUBE;
+ case spv::DimSubpassData:
+ return msaa ? DESCRIPTOR_REQ_MULTI_SAMPLE : DESCRIPTOR_REQ_SINGLE_SAMPLE;
+ default: // buffer, etc.
+ return 0;
+ }
}
- }
- default:
- return 0;
+ default:
+ return 0;
}
}
}
-static bool
-validate_pipeline_shader_stage(debug_report_data *report_data, VkPipelineShaderStageCreateInfo const *pStage,
- PIPELINE_STATE *pipeline, shader_module **out_module, spirv_inst_iter *out_entrypoint,
- VkPhysicalDeviceFeatures const *enabledFeatures,
- std::unordered_map<VkShaderModule, std::unique_ptr<shader_module>> const &shaderModuleMap) {
+static bool validate_pipeline_shader_stage(
+ debug_report_data *report_data, VkPipelineShaderStageCreateInfo const *pStage, PIPELINE_STATE *pipeline,
+ shader_module **out_module, spirv_inst_iter *out_entrypoint, VkPhysicalDeviceFeatures const *enabledFeatures,
+ std::unordered_map<VkShaderModule, std::unique_ptr<shader_module>> const &shaderModuleMap) {
bool pass = true;
auto module_it = shaderModuleMap.find(pStage->module);
auto module = *out_module = module_it->second.get();
@@ -2627,7 +2608,7 @@
if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__, VALIDATION_ERROR_00510,
"SC", "No entrypoint found named `%s` for stage %s. %s.", pStage->pName,
string_VkShaderStageFlagBits(pStage->stage), validation_error_map[VALIDATION_ERROR_00510])) {
- return false; // no point continuing beyond here, any analysis is just going to be garbage.
+ return false; // no point continuing beyond here, any analysis is just going to be garbage.
}
}
@@ -2664,18 +2645,20 @@
}
} else if (~binding->stageFlags & pStage->stage) {
if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, 0, __LINE__,
- SHADER_CHECKER_DESCRIPTOR_NOT_ACCESSIBLE_FROM_STAGE, "SC", "Shader uses descriptor slot %u.%u (used "
- "as type `%s`) but descriptor not "
- "accessible from stage %s",
+ SHADER_CHECKER_DESCRIPTOR_NOT_ACCESSIBLE_FROM_STAGE, "SC",
+ "Shader uses descriptor slot %u.%u (used "
+ "as type `%s`) but descriptor not "
+ "accessible from stage %s",
use.first.first, use.first.second, describe_type(module, use.second.type_id).c_str(),
string_VkShaderStageFlagBits(pStage->stage))) {
pass = false;
}
} else if (!descriptor_type_match(module, use.second.type_id, binding->descriptorType, required_descriptor_count)) {
if (log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__,
- SHADER_CHECKER_DESCRIPTOR_TYPE_MISMATCH, "SC", "Type mismatch on descriptor slot "
- "%u.%u (used as type `%s`) but "
- "descriptor of type %s",
+ SHADER_CHECKER_DESCRIPTOR_TYPE_MISMATCH, "SC",
+ "Type mismatch on descriptor slot "
+ "%u.%u (used as type `%s`) but "
+ "descriptor of type %s",
use.first.first, use.first.second, describe_type(module, use.second.type_id).c_str(),
string_VkDescriptorType(binding->descriptorType))) {
pass = false;
@@ -2726,10 +2709,9 @@
// Validate that the shaders used by the given pipeline and store the active_slots
// that are actually used by the pipeline into pPipeline->active_slots
-static bool
-validate_and_capture_pipeline_shader_state(debug_report_data *report_data, PIPELINE_STATE *pPipeline,
- VkPhysicalDeviceFeatures const *enabledFeatures,
- std::unordered_map<VkShaderModule, unique_ptr<shader_module>> const &shaderModuleMap) {
+static bool validate_and_capture_pipeline_shader_state(
+ debug_report_data *report_data, PIPELINE_STATE *pPipeline, VkPhysicalDeviceFeatures const *enabledFeatures,
+ std::unordered_map<VkShaderModule, unique_ptr<shader_module>> const &shaderModuleMap) {
auto pCreateInfo = pPipeline->graphicsPipelineCI.ptr();
int vertex_stage = get_shader_stage_id(VK_SHADER_STAGE_VERTEX_BIT);
int fragment_stage = get_shader_stage_id(VK_SHADER_STAGE_FRAGMENT_BIT);
@@ -2749,8 +2731,7 @@
}
// if the shader stages are no good individually, cross-stage validation is pointless.
- if (!pass)
- return false;
+ if (!pass) return false;
vi = pCreateInfo->pVertexInputState;
@@ -2841,13 +2822,14 @@
auto vertex_binding = pPipeline->vertexBindingDescriptions[i].binding;
if ((pCB->currentDrawData.buffers.size() < (vertex_binding + 1)) ||
(pCB->currentDrawData.buffers[vertex_binding] == VK_NULL_HANDLE)) {
- skip_call |= log_msg(
- my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS, "DS",
- "The Pipeline State Object (0x%" PRIxLEAST64 ") expects that this Command Buffer's vertex binding Index %u "
- "should be set via vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct "
- "at index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
- (uint64_t)state.pipeline_state->pipeline, vertex_binding, i, vertex_binding);
+ skip_call |=
+ log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_VTX_INDEX_OUT_OF_BOUNDS, "DS",
+ "The Pipeline State Object (0x%" PRIxLEAST64
+ ") expects that this Command Buffer's vertex binding Index %u "
+ "should be set via vkCmdBindVertexBuffers. This is because VkVertexInputBindingDescription struct "
+ "at index " PRINTF_SIZE_T_SPECIFIER " of pVertexBindingDescriptions has a binding value of %u.",
+ (uint64_t)state.pipeline_state->pipeline, vertex_binding, i, vertex_binding);
}
}
} else {
@@ -2911,7 +2893,8 @@
log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
reinterpret_cast<const uint64_t &>(pPipeline->pipeline), __LINE__, DRAWSTATE_INVALID_RENDERPASS, "DS",
"Render pass subpass %u mismatch with blending state defined and blend state attachment "
- "count %u while subpass color attachment count %u in Pipeline (0x%" PRIxLEAST64 ")! These "
+ "count %u while subpass color attachment count %u in Pipeline (0x%" PRIxLEAST64
+ ")! These "
"must be the same at draw-time.",
pCB->activeSubpass, color_blend_state->attachmentCount, subpass_desc->colorAttachmentCount,
reinterpret_cast<const uint64_t &>(pPipeline->pipeline));
@@ -2957,7 +2940,8 @@
skip_call |=
log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
reinterpret_cast<const uint64_t &>(pPipeline->pipeline), __LINE__, DRAWSTATE_RENDERPASS_INCOMPATIBLE, "DS",
- "At Draw time the active render pass (0x%" PRIxLEAST64 ") is incompatible w/ gfx pipeline "
+ "At Draw time the active render pass (0x%" PRIxLEAST64
+ ") is incompatible w/ gfx pipeline "
"(0x%" PRIxLEAST64 ") that was created w/ render pass (0x%" PRIxLEAST64 ") due to: %s",
reinterpret_cast<uint64_t &>(pCB->activeRenderPass->renderPass),
reinterpret_cast<uint64_t const &>(pPipeline->pipeline),
@@ -2990,8 +2974,7 @@
DRAWSTATE_INVALID_PIPELINE, "DS",
"At Draw/Dispatch time no valid VkPipeline is bound! This is illegal. Please bind one with vkCmdBindPipeline().");
// Early return as any further checks below will be busted w/o a pipeline
- if (result)
- return true;
+ if (result) return true;
}
// First check flag states
if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point)
@@ -3021,7 +3004,7 @@
") bound as set #%u is not compatible with overlapping VkPipelineLayout 0x%" PRIxLEAST64 " due to: %s",
reinterpret_cast<uint64_t &>(setHandle), setIndex, reinterpret_cast<uint64_t &>(pipeline_layout.layout),
errorString.c_str());
- } else { // Valid set is bound and layout compatible, validate that it's updated
+ } else { // Valid set is bound and layout compatible, validate that it's updated
// Pull the set node
cvdescriptorset::DescriptorSet *descriptor_set = state.boundDescriptorSets[setIndex];
// Gather active bindings
@@ -3034,12 +3017,13 @@
if (!descriptor_set->IsUpdated()) {
for (auto binding : active_bindings) {
if (!descriptor_set->GetImmutableSamplerPtrFromBinding(binding)) {
- result |= log_msg(
- my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
- (uint64_t)descriptor_set->GetSet(), __LINE__, DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, "DS",
- "Descriptor Set 0x%" PRIxLEAST64 " bound but was never updated. It is now being used to draw so "
- "this will result in undefined behavior.",
- (uint64_t)descriptor_set->GetSet());
+ result |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, (uint64_t)descriptor_set->GetSet(),
+ __LINE__, DRAWSTATE_DESCRIPTOR_SET_NOT_UPDATED, "DS",
+ "Descriptor Set 0x%" PRIxLEAST64
+ " bound but was never updated. It is now being used to draw so "
+ "this will result in undefined behavior.",
+ (uint64_t)descriptor_set->GetSet());
}
}
}
@@ -3058,8 +3042,7 @@
}
// Check general pipeline state that needs to be validated at drawtime
- if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point)
- result |= ValidatePipelineDrawtimeState(my_data, state, cb_node, pPipe);
+ if (VK_PIPELINE_BIND_POINT_GRAPHICS == bind_point) result |= ValidatePipelineDrawtimeState(my_data, state, cb_node, pPipe);
return result;
}
@@ -3090,16 +3073,18 @@
// First check to see if the physical device supports wide lines.
if ((VK_FALSE == my_data->enabled_features.wideLines) && (1.0f != lineWidth)) {
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, target, __LINE__,
- dsError, "DS", "Attempt to set lineWidth to %f but physical device wideLines feature "
- "not supported/enabled so lineWidth must be 1.0f!",
+ dsError, "DS",
+ "Attempt to set lineWidth to %f but physical device wideLines feature "
+ "not supported/enabled so lineWidth must be 1.0f!",
lineWidth);
} else {
// Otherwise, make sure the width falls in the valid range.
if ((my_data->phys_dev_properties.properties.limits.lineWidthRange[0] > lineWidth) ||
(my_data->phys_dev_properties.properties.limits.lineWidthRange[1] < lineWidth)) {
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, target,
- __LINE__, dsError, "DS", "Attempt to set lineWidth to %f but physical device limits line width "
- "to between [%f, %f]!",
+ __LINE__, dsError, "DS",
+ "Attempt to set lineWidth to %f but physical device limits line width "
+ "to between [%f, %f]!",
lineWidth, my_data->phys_dev_properties.properties.limits.lineWidthRange[0],
my_data->phys_dev_properties.properties.limits.lineWidthRange[1]);
}
@@ -3156,11 +3141,11 @@
// only attachment state, so memcmp is best suited for the comparison
if (memcmp(static_cast<const void *>(pAttachments), static_cast<const void *>(&pAttachments[i]),
sizeof(pAttachments[0]))) {
- skip_call |=
- log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_01532, "DS", "Invalid Pipeline CreateInfo: If independent blend feature not "
- "enabled, all elements of pAttachments must be identical. %s",
- validation_error_map[VALIDATION_ERROR_01532]);
+ skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
+ __LINE__, VALIDATION_ERROR_01532, "DS",
+ "Invalid Pipeline CreateInfo: If independent blend feature not "
+ "enabled, all elements of pAttachments must be identical. %s",
+ validation_error_map[VALIDATION_ERROR_01532]);
break;
}
}
@@ -3181,8 +3166,9 @@
auto renderPass = getRenderPassState(my_data, pPipeline->graphicsPipelineCI.renderPass);
if (renderPass && pPipeline->graphicsPipelineCI.subpass >= renderPass->createInfo.subpassCount) {
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_02122, "DS", "Invalid Pipeline CreateInfo State: Subpass index %u "
- "is out of range for this renderpass (0..%u). %s",
+ VALIDATION_ERROR_02122, "DS",
+ "Invalid Pipeline CreateInfo State: Subpass index %u "
+ "is out of range for this renderpass (0..%u). %s",
pPipeline->graphicsPipelineCI.subpass, renderPass->createInfo.subpassCount - 1,
validation_error_map[VALIDATION_ERROR_02122]);
}
@@ -3236,18 +3222,20 @@
(!pPipeline->graphicsPipelineCI.pInputAssemblyState ||
pPipeline->graphicsPipelineCI.pInputAssemblyState->topology != VK_PRIMITIVE_TOPOLOGY_PATCH_LIST)) {
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_02099, "DS", "Invalid Pipeline CreateInfo State: "
- "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as IA "
- "topology for tessellation pipelines. %s",
+ VALIDATION_ERROR_02099, "DS",
+ "Invalid Pipeline CreateInfo State: "
+ "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST must be set as IA "
+ "topology for tessellation pipelines. %s",
validation_error_map[VALIDATION_ERROR_02099]);
}
if (pPipeline->graphicsPipelineCI.pInputAssemblyState &&
pPipeline->graphicsPipelineCI.pInputAssemblyState->topology == VK_PRIMITIVE_TOPOLOGY_PATCH_LIST) {
if (~pPipeline->active_shaders & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) {
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_02100, "DS", "Invalid Pipeline CreateInfo State: "
- "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive "
- "topology is only valid for tessellation pipelines. %s",
+ VALIDATION_ERROR_02100, "DS",
+ "Invalid Pipeline CreateInfo State: "
+ "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive "
+ "topology is only valid for tessellation pipelines. %s",
validation_error_map[VALIDATION_ERROR_02100]);
}
}
@@ -3257,10 +3245,11 @@
(pPipeline->graphicsPipelineCI.pTessellationState->patchControlPoints >
my_data->phys_dev_properties.properties.limits.maxTessellationPatchSize))) {
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_01426, "DS", "Invalid Pipeline CreateInfo State: "
- "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive "
- "topology used with patchControlPoints value %u."
- " patchControlPoints should be >0 and <=%u. %s",
+ VALIDATION_ERROR_01426, "DS",
+ "Invalid Pipeline CreateInfo State: "
+ "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST primitive "
+ "topology used with patchControlPoints value %u."
+ " patchControlPoints should be >0 and <=%u. %s",
pPipeline->graphicsPipelineCI.pTessellationState->patchControlPoints,
my_data->phys_dev_properties.properties.limits.maxTessellationPatchSize,
validation_error_map[VALIDATION_ERROR_01426]);
@@ -3297,8 +3286,7 @@
// Free the Pipeline nodes
static void deletePipelines(layer_data *my_data) {
- if (my_data->pipelineMap.size() <= 0)
- return;
+ if (my_data->pipelineMap.size() <= 0) return;
for (auto &pipe_map_pair : my_data->pipelineMap) {
delete pipe_map_pair.second;
}
@@ -3319,27 +3307,27 @@
// Return false if update struct is of valid type, otherwise flag error and return code from callback
static bool validUpdateStruct(layer_data *my_data, const VkDevice device, const GENERIC_HEADER *pUpdateStruct) {
switch (pUpdateStruct->sType) {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- return false;
- default:
- return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
- "Unexpected UPDATE struct of type %s (value %u) in vkUpdateDescriptors() struct tree",
- string_VkStructureType(pUpdateStruct->sType), pUpdateStruct->sType);
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ return false;
+ default:
+ return log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
+ "Unexpected UPDATE struct of type %s (value %u) in vkUpdateDescriptors() struct tree",
+ string_VkStructureType(pUpdateStruct->sType), pUpdateStruct->sType);
}
}
// Set count for given update struct in the last parameter
static uint32_t getUpdateCount(layer_data *my_data, const VkDevice device, const GENERIC_HEADER *pUpdateStruct) {
switch (pUpdateStruct->sType) {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- return ((VkWriteDescriptorSet *)pUpdateStruct)->descriptorCount;
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- // TODO : Need to understand this case better and make sure code is correct
- return ((VkCopyDescriptorSet *)pUpdateStruct)->descriptorCount;
- default:
- return 0;
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ return ((VkWriteDescriptorSet *)pUpdateStruct)->descriptorCount;
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ // TODO : Need to understand this case better and make sure code is correct
+ return ((VkCopyDescriptorSet *)pUpdateStruct)->descriptorCount;
+ default:
+ return 0;
}
}
@@ -3361,18 +3349,18 @@
bool skip_call = false;
VkDescriptorType actualType = VK_DESCRIPTOR_TYPE_MAX_ENUM;
switch (pUpdateStruct->sType) {
- case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
- actualType = ((VkWriteDescriptorSet *)pUpdateStruct)->descriptorType;
- break;
- case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
- // No need to validate
- return false;
- break;
- default:
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
- "Unexpected UPDATE struct of type %s (value %u) in vkUpdateDescriptors() struct tree",
- string_VkStructureType(pUpdateStruct->sType), pUpdateStruct->sType);
+ case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:
+ actualType = ((VkWriteDescriptorSet *)pUpdateStruct)->descriptorType;
+ break;
+ case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:
+ // No need to validate
+ return false;
+ break;
+ default:
+ skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_UPDATE_STRUCT, "DS",
+ "Unexpected UPDATE struct of type %s (value %u) in vkUpdateDescriptors() struct tree",
+ string_VkStructureType(pUpdateStruct->sType), pUpdateStruct->sType);
}
if (!skip_call) {
if (layout_type != actualType) {
@@ -3450,8 +3438,7 @@
if (node.layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
imgpair = {image, false, VkImageSubresource()};
auto imgsubIt = pCB->imageLayoutMap.find(imgpair);
- if (imgsubIt == pCB->imageLayoutMap.end())
- return false;
+ if (imgsubIt == pCB->imageLayoutMap.end()) return false;
node = imgsubIt->second;
}
return true;
@@ -3467,8 +3454,7 @@
if (layout == VK_IMAGE_LAYOUT_MAX_ENUM) {
imgpair = {imgpair.image, false, VkImageSubresource()};
auto imgsubIt = my_data->imageLayoutMap.find(imgpair);
- if (imgsubIt == my_data->imageLayoutMap.end())
- return false;
+ if (imgsubIt == my_data->imageLayoutMap.end()) return false;
layout = imgsubIt->second.layout;
}
return true;
@@ -3481,11 +3467,9 @@
bool FindLayouts(const layer_data *my_data, VkImage image, std::vector<VkImageLayout> &layouts) {
auto sub_data = my_data->imageSubresourceMap.find(image);
- if (sub_data == my_data->imageSubresourceMap.end())
- return false;
+ if (sub_data == my_data->imageSubresourceMap.end()) return false;
auto image_state = getImageState(my_data, image);
- if (!image_state)
- return false;
+ if (!image_state) return false;
bool ignoreGlobal = false;
// TODO: Make this robust for >1 aspect mask. Now it will just say ignore
// potential errors in this case.
@@ -3493,8 +3477,7 @@
ignoreGlobal = true;
}
for (auto imgsubpair : sub_data->second) {
- if (ignoreGlobal && !imgsubpair.hasSubresource)
- continue;
+ if (ignoreGlobal && !imgsubpair.hasSubresource) continue;
auto img_data = my_data->imageLayoutMap.find(imgsubpair);
if (img_data != my_data->imageLayoutMap.end()) {
layouts.push_back(img_data->second.layout);
@@ -3559,7 +3542,8 @@
SetLayout(pObject, imgpair, layout, VK_IMAGE_ASPECT_METADATA_BIT);
}
-template <class OBJECT, class LAYOUT> void SetLayout(OBJECT *pObject, VkImage image, const LAYOUT &layout) {
+template <class OBJECT, class LAYOUT>
+void SetLayout(OBJECT *pObject, VkImage image, const LAYOUT &layout) {
ImageSubresourcePair imgpair = {image, false, VkImageSubresource()};
SetLayout(pObject, image, imgpair, layout);
}
@@ -3593,8 +3577,7 @@
// Return false if no errors occur
// Return true if validation error occurs and callback returns true (to skip upcoming API call down the chain)
static bool validateIdleDescriptorSet(const layer_data *dev_data, VkDescriptorSet set, std::string func_str) {
- if (dev_data->instance_data->disabled.idle_descriptor_set)
- return false;
+ if (dev_data->instance_data->disabled.idle_descriptor_set) return false;
bool skip_call = false;
auto set_node = dev_data->setMap.find(set);
if (set_node == dev_data->setMap.end()) {
@@ -3623,8 +3606,7 @@
// Free all DS Pools including their Sets & related sub-structs
// NOTE : Calls to this function should be wrapped in mutex
static void deletePools(layer_data *my_data) {
- if (my_data->descriptorPoolMap.size() <= 0)
- return;
+ if (my_data->descriptorPoolMap.size() <= 0) return;
for (auto ii = my_data->descriptorPoolMap.begin(); ii != my_data->descriptorPoolMap.end(); ++ii) {
// Remove this pools' sets from setMap and delete them
for (auto ds : (*ii).second->sets) {
@@ -3682,8 +3664,7 @@
// If a renderpass is active, verify that the given command type is appropriate for current subpass state
bool ValidateCmdSubpassState(const layer_data *dev_data, const GLOBAL_CB_NODE *pCB, const CMD_TYPE cmd_type) {
- if (!pCB->activeRenderPass)
- return false;
+ if (!pCB->activeRenderPass) return false;
bool skip_call = false;
if (pCB->activeSubpassContents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS &&
(cmd_type != CMD_EXECUTECOMMANDS && cmd_type != CMD_NEXTSUBPASS && cmd_type != CMD_ENDRENDERPASS)) {
@@ -3730,61 +3711,61 @@
if (pPool) {
VkQueueFlags flags = my_data->phys_dev_properties.queue_family_properties[pPool->queueFamilyIndex].queueFlags;
switch (cmd) {
- case CMD_BINDPIPELINE:
- case CMD_BINDPIPELINEDELTA:
- case CMD_BINDDESCRIPTORSETS:
- case CMD_FILLBUFFER:
- case CMD_CLEARCOLORIMAGE:
- case CMD_SETEVENT:
- case CMD_RESETEVENT:
- case CMD_WAITEVENTS:
- case CMD_BEGINQUERY:
- case CMD_ENDQUERY:
- case CMD_RESETQUERYPOOL:
- case CMD_COPYQUERYPOOLRESULTS:
- case CMD_WRITETIMESTAMP:
- skip_call |= checkGraphicsOrComputeBit(my_data, flags, cmdTypeToString(cmd).c_str());
- break;
- case CMD_SETVIEWPORTSTATE:
- case CMD_SETSCISSORSTATE:
- case CMD_SETLINEWIDTHSTATE:
- case CMD_SETDEPTHBIASSTATE:
- case CMD_SETBLENDSTATE:
- case CMD_SETDEPTHBOUNDSSTATE:
- case CMD_SETSTENCILREADMASKSTATE:
- case CMD_SETSTENCILWRITEMASKSTATE:
- case CMD_SETSTENCILREFERENCESTATE:
- case CMD_BINDINDEXBUFFER:
- case CMD_BINDVERTEXBUFFER:
- case CMD_DRAW:
- case CMD_DRAWINDEXED:
- case CMD_DRAWINDIRECT:
- case CMD_DRAWINDEXEDINDIRECT:
- case CMD_BLITIMAGE:
- case CMD_CLEARATTACHMENTS:
- case CMD_CLEARDEPTHSTENCILIMAGE:
- case CMD_RESOLVEIMAGE:
- case CMD_BEGINRENDERPASS:
- case CMD_NEXTSUBPASS:
- case CMD_ENDRENDERPASS:
- skip_call |= checkGraphicsBit(my_data, flags, cmdTypeToString(cmd).c_str());
- break;
- case CMD_DISPATCH:
- case CMD_DISPATCHINDIRECT:
- skip_call |= checkComputeBit(my_data, flags, cmdTypeToString(cmd).c_str());
- break;
- case CMD_COPYBUFFER:
- case CMD_COPYIMAGE:
- case CMD_COPYBUFFERTOIMAGE:
- case CMD_COPYIMAGETOBUFFER:
- case CMD_CLONEIMAGEDATA:
- case CMD_UPDATEBUFFER:
- case CMD_PIPELINEBARRIER:
- case CMD_EXECUTECOMMANDS:
- case CMD_END:
- break;
- default:
- break;
+ case CMD_BINDPIPELINE:
+ case CMD_BINDPIPELINEDELTA:
+ case CMD_BINDDESCRIPTORSETS:
+ case CMD_FILLBUFFER:
+ case CMD_CLEARCOLORIMAGE:
+ case CMD_SETEVENT:
+ case CMD_RESETEVENT:
+ case CMD_WAITEVENTS:
+ case CMD_BEGINQUERY:
+ case CMD_ENDQUERY:
+ case CMD_RESETQUERYPOOL:
+ case CMD_COPYQUERYPOOLRESULTS:
+ case CMD_WRITETIMESTAMP:
+ skip_call |= checkGraphicsOrComputeBit(my_data, flags, cmdTypeToString(cmd).c_str());
+ break;
+ case CMD_SETVIEWPORTSTATE:
+ case CMD_SETSCISSORSTATE:
+ case CMD_SETLINEWIDTHSTATE:
+ case CMD_SETDEPTHBIASSTATE:
+ case CMD_SETBLENDSTATE:
+ case CMD_SETDEPTHBOUNDSSTATE:
+ case CMD_SETSTENCILREADMASKSTATE:
+ case CMD_SETSTENCILWRITEMASKSTATE:
+ case CMD_SETSTENCILREFERENCESTATE:
+ case CMD_BINDINDEXBUFFER:
+ case CMD_BINDVERTEXBUFFER:
+ case CMD_DRAW:
+ case CMD_DRAWINDEXED:
+ case CMD_DRAWINDIRECT:
+ case CMD_DRAWINDEXEDINDIRECT:
+ case CMD_BLITIMAGE:
+ case CMD_CLEARATTACHMENTS:
+ case CMD_CLEARDEPTHSTENCILIMAGE:
+ case CMD_RESOLVEIMAGE:
+ case CMD_BEGINRENDERPASS:
+ case CMD_NEXTSUBPASS:
+ case CMD_ENDRENDERPASS:
+ skip_call |= checkGraphicsBit(my_data, flags, cmdTypeToString(cmd).c_str());
+ break;
+ case CMD_DISPATCH:
+ case CMD_DISPATCHINDIRECT:
+ skip_call |= checkComputeBit(my_data, flags, cmdTypeToString(cmd).c_str());
+ break;
+ case CMD_COPYBUFFER:
+ case CMD_COPYIMAGE:
+ case CMD_COPYBUFFERTOIMAGE:
+ case CMD_COPYIMAGETOBUFFER:
+ case CMD_CLONEIMAGEDATA:
+ case CMD_UPDATEBUFFER:
+ case CMD_PIPELINEBARRIER:
+ case CMD_EXECUTECOMMANDS:
+ case CMD_END:
+ break;
+ default:
+ break;
}
}
if (pCB->state != CB_RECORDING) {
@@ -3804,66 +3785,66 @@
BASE_NODE *GetStateStructPtrFromObject(layer_data *dev_data, VK_OBJECT object_struct) {
BASE_NODE *base_ptr = nullptr;
switch (object_struct.type) {
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: {
- base_ptr = getSetNode(dev_data, reinterpret_cast<VkDescriptorSet &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: {
- base_ptr = getSamplerState(dev_data, reinterpret_cast<VkSampler &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: {
- base_ptr = getQueryPoolNode(dev_data, reinterpret_cast<VkQueryPool &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: {
- base_ptr = getPipelineState(dev_data, reinterpret_cast<VkPipeline &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
- base_ptr = getBufferState(dev_data, reinterpret_cast<VkBuffer &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: {
- base_ptr = getBufferViewState(dev_data, reinterpret_cast<VkBufferView &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
- base_ptr = getImageState(dev_data, reinterpret_cast<VkImage &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: {
- base_ptr = getImageViewState(dev_data, reinterpret_cast<VkImageView &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: {
- base_ptr = getEventNode(dev_data, reinterpret_cast<VkEvent &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: {
- base_ptr = getDescriptorPoolState(dev_data, reinterpret_cast<VkDescriptorPool &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: {
- base_ptr = getCommandPoolNode(dev_data, reinterpret_cast<VkCommandPool &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: {
- base_ptr = getFramebufferState(dev_data, reinterpret_cast<VkFramebuffer &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: {
- base_ptr = getRenderPassState(dev_data, reinterpret_cast<VkRenderPass &>(object_struct.handle));
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: {
- base_ptr = getMemObjInfo(dev_data, reinterpret_cast<VkDeviceMemory &>(object_struct.handle));
- break;
- }
- default:
- // TODO : Any other objects to be handled here?
- assert(0);
- break;
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: {
+ base_ptr = getSetNode(dev_data, reinterpret_cast<VkDescriptorSet &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: {
+ base_ptr = getSamplerState(dev_data, reinterpret_cast<VkSampler &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: {
+ base_ptr = getQueryPoolNode(dev_data, reinterpret_cast<VkQueryPool &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: {
+ base_ptr = getPipelineState(dev_data, reinterpret_cast<VkPipeline &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
+ base_ptr = getBufferState(dev_data, reinterpret_cast<VkBuffer &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: {
+ base_ptr = getBufferViewState(dev_data, reinterpret_cast<VkBufferView &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
+ base_ptr = getImageState(dev_data, reinterpret_cast<VkImage &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: {
+ base_ptr = getImageViewState(dev_data, reinterpret_cast<VkImageView &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: {
+ base_ptr = getEventNode(dev_data, reinterpret_cast<VkEvent &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: {
+ base_ptr = getDescriptorPoolState(dev_data, reinterpret_cast<VkDescriptorPool &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: {
+ base_ptr = getCommandPoolNode(dev_data, reinterpret_cast<VkCommandPool &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: {
+ base_ptr = getFramebufferState(dev_data, reinterpret_cast<VkFramebuffer &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: {
+ base_ptr = getRenderPassState(dev_data, reinterpret_cast<VkRenderPass &>(object_struct.handle));
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: {
+ base_ptr = getMemObjInfo(dev_data, reinterpret_cast<VkDeviceMemory &>(object_struct.handle));
+ break;
+ }
+ default:
+ // TODO : Any other objects to be handled here?
+ assert(0);
+ break;
}
return base_ptr;
}
@@ -3878,8 +3859,7 @@
// For a given object, if cb_node is in that objects cb_bindings, remove cb_node
static void removeCommandBufferBinding(layer_data *dev_data, VK_OBJECT const *object, GLOBAL_CB_NODE *cb_node) {
BASE_NODE *base_obj = GetStateStructPtrFromObject(dev_data, *object);
- if (base_obj)
- base_obj->cb_bindings.erase(cb_node);
+ if (base_obj) base_obj->cb_bindings.erase(cb_node);
}
// Reset the command buffer state
// Maintain the createInfo and set state to CB_NEW, but clear all other state
@@ -3942,8 +3922,7 @@
// Remove this cmdBuffer's reference from each FrameBuffer's CB ref list
for (auto framebuffer : pCB->framebuffers) {
auto fb_state = getFramebufferState(dev_data, framebuffer);
- if (fb_state)
- fb_state->cb_bindings.erase(pCB);
+ if (fb_state) fb_state->cb_bindings.erase(pCB);
}
pCB->framebuffers.clear();
pCB->activeFramebuffer = VK_NULL_HANDLE;
@@ -3954,7 +3933,7 @@
static void set_cb_pso_status(GLOBAL_CB_NODE *pCB, const PIPELINE_STATE *pPipe) {
// Account for any dynamic state not set via this PSO
if (!pPipe->graphicsPipelineCI.pDynamicState ||
- !pPipe->graphicsPipelineCI.pDynamicState->dynamicStateCount) { // All state is static
+ !pPipe->graphicsPipelineCI.pDynamicState->dynamicStateCount) { // All state is static
pCB->status |= CBSTATUS_ALL_STATE_SET;
} else {
// First consider all state on
@@ -3963,30 +3942,30 @@
CBStatusFlags psoDynStateMask = CBSTATUS_ALL_STATE_SET;
for (uint32_t i = 0; i < pPipe->graphicsPipelineCI.pDynamicState->dynamicStateCount; i++) {
switch (pPipe->graphicsPipelineCI.pDynamicState->pDynamicStates[i]) {
- case VK_DYNAMIC_STATE_LINE_WIDTH:
- psoDynStateMask &= ~CBSTATUS_LINE_WIDTH_SET;
- break;
- case VK_DYNAMIC_STATE_DEPTH_BIAS:
- psoDynStateMask &= ~CBSTATUS_DEPTH_BIAS_SET;
- break;
- case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
- psoDynStateMask &= ~CBSTATUS_BLEND_CONSTANTS_SET;
- break;
- case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
- psoDynStateMask &= ~CBSTATUS_DEPTH_BOUNDS_SET;
- break;
- case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
- psoDynStateMask &= ~CBSTATUS_STENCIL_READ_MASK_SET;
- break;
- case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
- psoDynStateMask &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
- break;
- case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
- psoDynStateMask &= ~CBSTATUS_STENCIL_REFERENCE_SET;
- break;
- default:
- // TODO : Flag error here
- break;
+ case VK_DYNAMIC_STATE_LINE_WIDTH:
+ psoDynStateMask &= ~CBSTATUS_LINE_WIDTH_SET;
+ break;
+ case VK_DYNAMIC_STATE_DEPTH_BIAS:
+ psoDynStateMask &= ~CBSTATUS_DEPTH_BIAS_SET;
+ break;
+ case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
+ psoDynStateMask &= ~CBSTATUS_BLEND_CONSTANTS_SET;
+ break;
+ case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
+ psoDynStateMask &= ~CBSTATUS_DEPTH_BOUNDS_SET;
+ break;
+ case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
+ psoDynStateMask &= ~CBSTATUS_STENCIL_READ_MASK_SET;
+ break;
+ case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
+ psoDynStateMask &= ~CBSTATUS_STENCIL_WRITE_MASK_SET;
+ break;
+ case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
+ psoDynStateMask &= ~CBSTATUS_STENCIL_REFERENCE_SET;
+ break;
+ default:
+ // TODO : Flag error here
+ break;
}
}
pCB->status |= psoDynStateMask;
@@ -4023,7 +4002,6 @@
}
static void init_core_validation(instance_layer_data *instance_data, const VkAllocationCallbacks *pAllocator) {
-
layer_debug_actions(instance_data->report_data, instance_data->logging_callback, pAllocator, "lunarg_core_validation");
}
@@ -4067,15 +4045,13 @@
assert(chain_info->u.pLayerInfo);
PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance)fpGetInstanceProcAddr(NULL, "vkCreateInstance");
- if (fpCreateInstance == NULL)
- return VK_ERROR_INITIALIZATION_FAILED;
+ if (fpCreateInstance == NULL) return VK_ERROR_INITIALIZATION_FAILED;
// Advance the link info for the next element on the chain
chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
- if (result != VK_SUCCESS)
- return result;
+ if (result != VK_SUCCESS) return result;
instance_layer_data *instance_data = get_my_data_ptr(get_dispatch_key(*pInstance), instance_layer_data_map);
instance_data->instance = *pInstance;
@@ -4153,13 +4129,13 @@
"Invalid queue create request in vkCreateDevice(). Invalid queueFamilyIndex %u requested.", requestedIndex);
} else if (create_info->pQueueCreateInfos[i].queueCount >
physical_device_state->queue_family_properties[requestedIndex].queueCount) {
- skip_call |=
- log_msg(instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST,
- "DL", "Invalid queue create request in vkCreateDevice(). QueueFamilyIndex %u only has %u queues, but "
- "requested queueCount is %u.",
- requestedIndex, physical_device_state->queue_family_properties[requestedIndex].queueCount,
- create_info->pQueueCreateInfos[i].queueCount);
+ skip_call |= log_msg(
+ instance_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST, "DL",
+ "Invalid queue create request in vkCreateDevice(). QueueFamilyIndex %u only has %u queues, but "
+ "requested queueCount is %u.",
+ requestedIndex, physical_device_state->queue_family_properties[requestedIndex].queueCount,
+ create_info->pQueueCreateInfos[i].queueCount);
}
}
}
@@ -4182,11 +4158,12 @@
for (uint32_t i = 0; i < total_bools; i++) {
if (requested[i] > actual[i]) {
// TODO: Add index to struct member name helper to be able to include a feature name
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, DEVLIMITS_INVALID_FEATURE_REQUESTED,
- "DL", "While calling vkCreateDevice(), requesting feature #%u in VkPhysicalDeviceFeatures struct, "
- "which is not available on this device.",
- i);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, DEVLIMITS_INVALID_FEATURE_REQUESTED, "DL",
+ "While calling vkCreateDevice(), requesting feature #%u in VkPhysicalDeviceFeatures struct, "
+ "which is not available on this device.",
+ i);
errors++;
}
}
@@ -4318,16 +4295,18 @@
bool skip = false;
if (!dev_data->enabled_features.geometryShader && (stageMask & VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT)) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
- geo_error_id, "DL", "%s call includes a stageMask with VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT bit set when "
- "device does not have geometryShader feature enabled. %s",
+ geo_error_id, "DL",
+ "%s call includes a stageMask with VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT bit set when "
+ "device does not have geometryShader feature enabled. %s",
caller, validation_error_map[geo_error_id]);
}
if (!dev_data->enabled_features.tessellationShader &&
(stageMask & (VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT))) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
- tess_error_id, "DL", "%s call includes a stageMask with VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT "
- "and/or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT bit(s) set when device "
- "does not have tessellationShader feature enabled. %s",
+ tess_error_id, "DL",
+ "%s call includes a stageMask with VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT "
+ "and/or VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT bit(s) set when device "
+ "does not have tessellationShader feature enabled. %s",
caller, validation_error_map[tess_error_id]);
}
return skip;
@@ -4360,13 +4339,14 @@
cb_image_data.first.subresource.arrayLayer, cb_image_data.first.subresource.mipLevel,
string_VkImageLayout(imageLayout), string_VkImageLayout(cb_image_data.second.initialLayout));
} else {
- skip_call |= log_msg(
- dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- reinterpret_cast<uint64_t &>(pCB->commandBuffer), __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Cannot submit cmd buffer using image (0x%" PRIx64 ") with layout %s when "
- "first use is %s.",
- reinterpret_cast<const uint64_t &>(cb_image_data.first.image), string_VkImageLayout(imageLayout),
- string_VkImageLayout(cb_image_data.second.initialLayout));
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, reinterpret_cast<uint64_t &>(pCB->commandBuffer),
+ __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", "Cannot submit cmd buffer using image (0x%" PRIx64
+ ") with layout %s when "
+ "first use is %s.",
+ reinterpret_cast<const uint64_t &>(cb_image_data.first.image), string_VkImageLayout(imageLayout),
+ string_VkImageLayout(cb_image_data.second.initialLayout));
}
}
SetLayout(dev_data, cb_image_data.first, cb_image_data.second.layout);
@@ -4383,79 +4363,79 @@
BASE_NODE *base_obj = nullptr;
for (auto obj : cb_node->object_bindings) {
switch (obj.type) {
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: {
- base_obj = getSetNode(dev_data, reinterpret_cast<VkDescriptorSet &>(obj.handle));
- error_code = DRAWSTATE_INVALID_DESCRIPTOR_SET;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: {
- base_obj = getSamplerState(dev_data, reinterpret_cast<VkSampler &>(obj.handle));
- error_code = DRAWSTATE_INVALID_SAMPLER;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: {
- base_obj = getQueryPoolNode(dev_data, reinterpret_cast<VkQueryPool &>(obj.handle));
- error_code = DRAWSTATE_INVALID_QUERY_POOL;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: {
- base_obj = getPipelineState(dev_data, reinterpret_cast<VkPipeline &>(obj.handle));
- error_code = DRAWSTATE_INVALID_PIPELINE;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
- base_obj = getBufferState(dev_data, reinterpret_cast<VkBuffer &>(obj.handle));
- error_code = DRAWSTATE_INVALID_BUFFER;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: {
- base_obj = getBufferViewState(dev_data, reinterpret_cast<VkBufferView &>(obj.handle));
- error_code = DRAWSTATE_INVALID_BUFFER_VIEW;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
- base_obj = getImageState(dev_data, reinterpret_cast<VkImage &>(obj.handle));
- error_code = DRAWSTATE_INVALID_IMAGE;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: {
- base_obj = getImageViewState(dev_data, reinterpret_cast<VkImageView &>(obj.handle));
- error_code = DRAWSTATE_INVALID_IMAGE_VIEW;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: {
- base_obj = getEventNode(dev_data, reinterpret_cast<VkEvent &>(obj.handle));
- error_code = DRAWSTATE_INVALID_EVENT;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: {
- base_obj = getDescriptorPoolState(dev_data, reinterpret_cast<VkDescriptorPool &>(obj.handle));
- error_code = DRAWSTATE_INVALID_DESCRIPTOR_POOL;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: {
- base_obj = getCommandPoolNode(dev_data, reinterpret_cast<VkCommandPool &>(obj.handle));
- error_code = DRAWSTATE_INVALID_COMMAND_POOL;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: {
- base_obj = getFramebufferState(dev_data, reinterpret_cast<VkFramebuffer &>(obj.handle));
- error_code = DRAWSTATE_INVALID_FRAMEBUFFER;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: {
- base_obj = getRenderPassState(dev_data, reinterpret_cast<VkRenderPass &>(obj.handle));
- error_code = DRAWSTATE_INVALID_RENDERPASS;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: {
- base_obj = getMemObjInfo(dev_data, reinterpret_cast<VkDeviceMemory &>(obj.handle));
- error_code = DRAWSTATE_INVALID_DEVICE_MEMORY;
- break;
- }
- default:
- // TODO : Merge handling of other objects types into this code
- break;
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT: {
+ base_obj = getSetNode(dev_data, reinterpret_cast<VkDescriptorSet &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_DESCRIPTOR_SET;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT: {
+ base_obj = getSamplerState(dev_data, reinterpret_cast<VkSampler &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_SAMPLER;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT: {
+ base_obj = getQueryPoolNode(dev_data, reinterpret_cast<VkQueryPool &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_QUERY_POOL;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT: {
+ base_obj = getPipelineState(dev_data, reinterpret_cast<VkPipeline &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_PIPELINE;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
+ base_obj = getBufferState(dev_data, reinterpret_cast<VkBuffer &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_BUFFER;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT: {
+ base_obj = getBufferViewState(dev_data, reinterpret_cast<VkBufferView &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_BUFFER_VIEW;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
+ base_obj = getImageState(dev_data, reinterpret_cast<VkImage &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_IMAGE;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT: {
+ base_obj = getImageViewState(dev_data, reinterpret_cast<VkImageView &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_IMAGE_VIEW;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT: {
+ base_obj = getEventNode(dev_data, reinterpret_cast<VkEvent &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_EVENT;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT: {
+ base_obj = getDescriptorPoolState(dev_data, reinterpret_cast<VkDescriptorPool &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_DESCRIPTOR_POOL;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT: {
+ base_obj = getCommandPoolNode(dev_data, reinterpret_cast<VkCommandPool &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_COMMAND_POOL;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT: {
+ base_obj = getFramebufferState(dev_data, reinterpret_cast<VkFramebuffer &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_FRAMEBUFFER;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT: {
+ base_obj = getRenderPassState(dev_data, reinterpret_cast<VkRenderPass &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_RENDERPASS;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT: {
+ base_obj = getMemObjInfo(dev_data, reinterpret_cast<VkDeviceMemory &>(obj.handle));
+ error_code = DRAWSTATE_INVALID_DEVICE_MEMORY;
+ break;
+ }
+ default:
+ // TODO : Merge handling of other objects types into this code
+ break;
}
if (!base_obj) {
skip |=
@@ -4494,8 +4474,7 @@
}
for (auto event : cb_node->writeEventsBeforeWait) {
auto event_state = getEventNode(dev_data, event);
- if (event_state)
- event_state->write_in_use++;
+ if (event_state) event_state->write_in_use++;
}
return skip_call;
}
@@ -4661,8 +4640,7 @@
static bool validateCommandBufferState(layer_data *dev_data, GLOBAL_CB_NODE *pCB, const char *call_source) {
bool skip = false;
- if (dev_data->instance_data->disabled.command_buffer_state)
- return skip;
+ if (dev_data->instance_data->disabled.command_buffer_state) return skip;
// Validate ONE_TIME_SUBMIT_BIT CB is not being submitted more than once
if ((pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT) && (pCB->submitCount > 1)) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
@@ -4687,7 +4665,7 @@
"You are submitting command buffer 0x%p that is invalid because bound %s 0x%" PRIxLEAST64 " was %s.",
pCB->commandBuffer, type_str, obj.handle, cause_str);
}
- } else { // Flag error for using CB w/o vkEndCommandBuffer() called
+ } else { // Flag error for using CB w/o vkEndCommandBuffer() called
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
(uint64_t)(pCB->commandBuffer), __LINE__, DRAWSTATE_NO_END_COMMAND_BUFFER, "DS",
"You must call vkEndCommandBuffer() on command buffer 0x%p before this call to %s!", pCB->commandBuffer,
@@ -4851,12 +4829,11 @@
cbs.push_back(secondaryCmdBuffer);
}
- cb_node->submitCount++; // increment submit count
+ cb_node->submitCount++; // increment submit count
skip_call |= validatePrimaryCommandBufferState(dev_data, cb_node);
skip_call |= validateQueueFamilyIndices(dev_data, cb_node, queue);
// Potential early exit here as bad object state may crash in delayed function calls
- if (skip_call)
- return result;
+ if (skip_call) return result;
// Call submit-time functions to validate/update state
for (auto &function : cb_node->validate_functions) {
skip_call |= function();
@@ -4883,8 +4860,7 @@
}
lock.unlock();
- if (!skip_call)
- result = dev_data->dispatch_table.QueueSubmit(queue, submitCount, pSubmits, fence);
+ if (!skip_call) result = dev_data->dispatch_table.QueueSubmit(queue, submitCount, pSubmits, fence);
return result;
}
@@ -4926,8 +4902,7 @@
// For given obj node, if it is use, flag a validation error and return callback result, else return false
bool ValidateObjectNotInUse(const layer_data *dev_data, BASE_NODE *obj_node, VK_OBJECT obj_struct,
UNIQUE_VALIDATION_ERROR_CODE error_code) {
- if (dev_data->instance_data->disabled.object_in_use)
- return false;
+ if (dev_data->instance_data->disabled.object_in_use) return false;
bool skip = false;
if (obj_node->in_use.load()) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, obj_struct.type, obj_struct.handle, __LINE__,
@@ -4940,8 +4915,7 @@
static bool PreCallValidateFreeMemory(layer_data *dev_data, VkDeviceMemory mem, DEVICE_MEM_INFO **mem_info, VK_OBJECT *obj_struct) {
*mem_info = getMemObjInfo(dev_data, mem);
*obj_struct = {reinterpret_cast<uint64_t &>(mem), VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT};
- if (dev_data->instance_data->disabled.free_memory)
- return false;
+ if (dev_data->instance_data->disabled.free_memory) return false;
bool skip = false;
if (*mem_info) {
skip |= ValidateObjectNotInUse(dev_data, *mem_info, *obj_struct, VALIDATION_ERROR_00620);
@@ -4956,21 +4930,21 @@
"MEM", "VK Object 0x%" PRIxLEAST64 " still has a reference to mem obj 0x%" PRIxLEAST64, obj.handle,
(uint64_t)mem_info->mem);
switch (obj.type) {
- case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
- auto image_state = getImageState(dev_data, reinterpret_cast<VkImage &>(obj.handle));
- assert(image_state); // Any destroyed images should already be removed from bindings
- image_state->binding.mem = MEMORY_UNBOUND;
- break;
- }
- case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
- auto buffer_state = getBufferState(dev_data, reinterpret_cast<VkBuffer &>(obj.handle));
- assert(buffer_state); // Any destroyed buffers should already be removed from bindings
- buffer_state->binding.mem = MEMORY_UNBOUND;
- break;
- }
- default:
- // Should only have buffer or image objects bound to memory
- assert(0);
+ case VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT: {
+ auto image_state = getImageState(dev_data, reinterpret_cast<VkImage &>(obj.handle));
+ assert(image_state); // Any destroyed images should already be removed from bindings
+ image_state->binding.mem = MEMORY_UNBOUND;
+ break;
+ }
+ case VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT: {
+ auto buffer_state = getBufferState(dev_data, reinterpret_cast<VkBuffer &>(obj.handle));
+ assert(buffer_state); // Any destroyed buffers should already be removed from bindings
+ buffer_state->binding.mem = MEMORY_UNBOUND;
+ break;
+ }
+ default:
+ // Should only have buffer or image objects bound to memory
+ assert(0);
}
}
// Any bound cmd buffers are now invalid
@@ -5117,7 +5091,8 @@
if (pFence->state == FENCE_UNSIGNALED) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
reinterpret_cast<uint64_t &>(fence), __LINE__, MEMTRACK_INVALID_FENCE_STATE, "MEM",
- "%s called for fence 0x%" PRIxLEAST64 " which has not been submitted on a Queue or during "
+ "%s called for fence 0x%" PRIxLEAST64
+ " which has not been submitted on a Queue or during "
"acquire next image.",
apiCall, reinterpret_cast<uint64_t &>(fence));
}
@@ -5138,8 +5113,7 @@
}
static bool PreCallValidateWaitForFences(layer_data *dev_data, uint32_t fence_count, const VkFence *fences) {
- if (dev_data->instance_data->disabled.wait_for_fences)
- return false;
+ if (dev_data->instance_data->disabled.wait_for_fences) return false;
bool skip = false;
for (uint32_t i = 0; i < fence_count; i++) {
skip |= verifyWaitFenceState(dev_data, fences[i], "vkWaitForFences");
@@ -5167,8 +5141,7 @@
std::unique_lock<std::mutex> lock(global_lock);
bool skip = PreCallValidateWaitForFences(dev_data, fenceCount, pFences);
lock.unlock();
- if (skip)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.WaitForFences(device, fenceCount, pFences, waitAll, timeout);
@@ -5181,8 +5154,7 @@
}
static bool PreCallValidateGetFenceStatus(layer_data *dev_data, VkFence fence) {
- if (dev_data->instance_data->disabled.get_fence_state)
- return false;
+ if (dev_data->instance_data->disabled.get_fence_state) return false;
return verifyWaitFenceState(dev_data, fence, "vkGetFenceStatus");
}
@@ -5193,8 +5165,7 @@
std::unique_lock<std::mutex> lock(global_lock);
bool skip = PreCallValidateGetFenceStatus(dev_data, fence);
lock.unlock();
- if (skip)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.GetFenceStatus(device, fence);
if (result == VK_SUCCESS) {
@@ -5226,8 +5197,7 @@
static bool PreCallValidateQueueWaitIdle(layer_data *dev_data, VkQueue queue, QUEUE_STATE **queue_state) {
*queue_state = getQueueState(dev_data, queue);
- if (dev_data->instance_data->disabled.queue_wait_idle)
- return false;
+ if (dev_data->instance_data->disabled.queue_wait_idle) return false;
return VerifyQueueStateToSeq(dev_data, *queue_state, (*queue_state)->seq + (*queue_state)->submissions.size());
}
@@ -5241,8 +5211,7 @@
std::unique_lock<std::mutex> lock(global_lock);
bool skip = PreCallValidateQueueWaitIdle(dev_data, queue, &queue_state);
lock.unlock();
- if (skip)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.QueueWaitIdle(queue);
if (VK_SUCCESS == result) {
lock.lock();
@@ -5253,8 +5222,7 @@
}
static bool PreCallValidateDeviceWaitIdle(layer_data *dev_data) {
- if (dev_data->instance_data->disabled.device_wait_idle)
- return false;
+ if (dev_data->instance_data->disabled.device_wait_idle) return false;
bool skip = false;
for (auto &queue : dev_data->queueMap) {
skip |= VerifyQueueStateToSeq(dev_data, &queue.second, queue.second.seq + queue.second.submissions.size());
@@ -5273,8 +5241,7 @@
std::unique_lock<std::mutex> lock(global_lock);
bool skip = PreCallValidateDeviceWaitIdle(dev_data);
lock.unlock();
- if (skip)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.DeviceWaitIdle(device);
if (VK_SUCCESS == result) {
lock.lock();
@@ -5287,8 +5254,7 @@
static bool PreCallValidateDestroyFence(layer_data *dev_data, VkFence fence, FENCE_NODE **fence_node, VK_OBJECT *obj_struct) {
*fence_node = getFenceNode(dev_data, fence);
*obj_struct = {reinterpret_cast<uint64_t &>(fence), VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT};
- if (dev_data->instance_data->disabled.destroy_fence)
- return false;
+ if (dev_data->instance_data->disabled.destroy_fence) return false;
bool skip = false;
if (*fence_node) {
if ((*fence_node)->state == FENCE_INFLIGHT) {
@@ -5322,8 +5288,7 @@
VK_OBJECT *obj_struct) {
*sema_node = getSemaphoreNode(dev_data, semaphore);
*obj_struct = {reinterpret_cast<uint64_t &>(semaphore), VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT};
- if (dev_data->instance_data->disabled.destroy_semaphore)
- return false;
+ if (dev_data->instance_data->disabled.destroy_semaphore) return false;
bool skip = false;
if (*sema_node) {
skip |= ValidateObjectNotInUse(dev_data, *sema_node, *obj_struct, VALIDATION_ERROR_00199);
@@ -5350,8 +5315,7 @@
static bool PreCallValidateDestroyEvent(layer_data *dev_data, VkEvent event, EVENT_STATE **event_state, VK_OBJECT *obj_struct) {
*event_state = getEventNode(dev_data, event);
*obj_struct = {reinterpret_cast<uint64_t &>(event), VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT};
- if (dev_data->instance_data->disabled.destroy_event)
- return false;
+ if (dev_data->instance_data->disabled.destroy_event) return false;
bool skip = false;
if (*event_state) {
skip |= ValidateObjectNotInUse(dev_data, *event_state, *obj_struct, VALIDATION_ERROR_00213);
@@ -5382,8 +5346,7 @@
VK_OBJECT *obj_struct) {
*qp_state = getQueryPoolNode(dev_data, query_pool);
*obj_struct = {reinterpret_cast<uint64_t &>(query_pool), VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT};
- if (dev_data->instance_data->disabled.destroy_query_pool)
- return false;
+ if (dev_data->instance_data->disabled.destroy_query_pool) return false;
bool skip = false;
if (*qp_state) {
skip |= ValidateObjectNotInUse(dev_data, *qp_state, *obj_struct, VALIDATION_ERROR_01012);
@@ -5419,8 +5382,7 @@
(*queries_in_flight)[query_state_pair.first].push_back(cmd_buffer);
}
}
- if (dev_data->instance_data->disabled.get_query_pool_results)
- return false;
+ if (dev_data->instance_data->disabled.get_query_pool_results) return false;
bool skip = false;
for (uint32_t i = 0; i < query_count; ++i) {
QueryObject query = {query_pool, first_query + i};
@@ -5506,8 +5468,7 @@
std::unique_lock<std::mutex> lock(global_lock);
bool skip = PreCallValidateGetQueryPoolResults(dev_data, queryPool, firstQuery, queryCount, flags, &queries_in_flight);
lock.unlock();
- if (skip)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result =
dev_data->dispatch_table.GetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
lock.lock();
@@ -5550,10 +5511,8 @@
if (range1->linear != range2->linear) {
pad_align = dev_data->phys_dev_properties.properties.limits.bufferImageGranularity;
}
- if ((r1_end & ~(pad_align - 1)) < (r2_start & ~(pad_align - 1)))
- return false;
- if ((r1_start & ~(pad_align - 1)) > (r2_end & ~(pad_align - 1)))
- return false;
+ if ((r1_end & ~(pad_align - 1)) < (r2_start & ~(pad_align - 1))) return false;
+ if ((r1_start & ~(pad_align - 1)) > (r2_end & ~(pad_align - 1))) return false;
if (range1->linear != range2->linear) {
// In linear vs. non-linear case, warn of aliasing
@@ -5681,8 +5640,7 @@
VK_OBJECT *obj_struct) {
*buffer_state = getBufferState(dev_data, buffer);
*obj_struct = {reinterpret_cast<uint64_t &>(buffer), VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT};
- if (dev_data->instance_data->disabled.destroy_buffer)
- return false;
+ if (dev_data->instance_data->disabled.destroy_buffer) return false;
bool skip = false;
if (*buffer_state) {
skip |= validateIdleBuffer(dev_data, buffer);
@@ -5720,8 +5678,7 @@
VK_OBJECT *obj_struct) {
*buffer_view_state = getBufferViewState(dev_data, buffer_view);
*obj_struct = {reinterpret_cast<uint64_t &>(buffer_view), VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT};
- if (dev_data->instance_data->disabled.destroy_buffer_view)
- return false;
+ if (dev_data->instance_data->disabled.destroy_buffer_view) return false;
bool skip = false;
if (*buffer_view_state) {
skip |= ValidateObjectNotInUse(dev_data, *buffer_view_state, *obj_struct, VALIDATION_ERROR_00701);
@@ -5755,8 +5712,7 @@
static bool PreCallValidateDestroyImage(layer_data *dev_data, VkImage image, IMAGE_STATE **image_state, VK_OBJECT *obj_struct) {
*image_state = getImageState(dev_data, image);
*obj_struct = {reinterpret_cast<uint64_t &>(image), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT};
- if (dev_data->instance_data->disabled.destroy_image)
- return false;
+ if (dev_data->instance_data->disabled.destroy_image) return false;
bool skip = false;
if (*image_state) {
skip |= ValidateObjectNotInUse(dev_data, *image_state, *obj_struct, VALIDATION_ERROR_00743);
@@ -5854,7 +5810,8 @@
if (vk_safe_modulo(memoryOffset, buffer_state->requirements.alignment) != 0) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, VALIDATION_ERROR_02174, "DS",
- "vkBindBufferMemory(): memoryOffset is 0x%" PRIxLEAST64 " but must be an integer multiple of the "
+ "vkBindBufferMemory(): memoryOffset is 0x%" PRIxLEAST64
+ " but must be an integer multiple of the "
"VkMemoryRequirements::alignment value 0x%" PRIxLEAST64
", returned from a call to vkGetBufferMemoryRequirements with buffer. %s",
memoryOffset, buffer_state->requirements.alignment, validation_error_map[VALIDATION_ERROR_02174]);
@@ -5867,8 +5824,12 @@
static const char *memory_type[3] = {"texel", "uniform", "storage"};
static const char *offset_name[3] = {"minTexelBufferOffsetAlignment", "minUniformBufferOffsetAlignment",
"minStorageBufferOffsetAlignment"};
- static const UNIQUE_VALIDATION_ERROR_CODE msgCode[3] = {VALIDATION_ERROR_00794, VALIDATION_ERROR_00795,
- VALIDATION_ERROR_00796};
+
+ // TODO: vk_validation_stats.py cannot abide braces immediately preceeding or following a validation error enum
+ // clang-format off
+ static const UNIQUE_VALIDATION_ERROR_CODE msgCode[3] = { VALIDATION_ERROR_00794, VALIDATION_ERROR_00795,
+ VALIDATION_ERROR_00796 };
+ // clang-format on
// Keep this one fresh!
const VkDeviceSize offset_requirement[3] = {
@@ -5880,12 +5841,12 @@
for (int i = 0; i < 3; i++) {
if (usage & usage_list[i]) {
if (vk_safe_modulo(memoryOffset, offset_requirement[i]) != 0) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, msgCode[i], "DS",
- "vkBindBufferMemory(): %s memoryOffset is 0x%" PRIxLEAST64 " but must be a multiple of "
- "device limit %s 0x%" PRIxLEAST64 ". %s",
- memory_type[i], memoryOffset, offset_name[i], offset_requirement[i],
- validation_error_map[msgCode[i]]);
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0,
+ __LINE__, msgCode[i], "DS", "vkBindBufferMemory(): %s memoryOffset is 0x%" PRIxLEAST64
+ " but must be a multiple of "
+ "device limit %s 0x%" PRIxLEAST64 ". %s",
+ memory_type[i], memoryOffset, offset_name[i], offset_requirement[i], validation_error_map[msgCode[i]]);
}
}
}
@@ -5922,8 +5883,7 @@
VK_OBJECT *obj_struct) {
*image_view_state = getImageViewState(dev_data, image_view);
*obj_struct = {reinterpret_cast<uint64_t &>(image_view), VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT};
- if (dev_data->instance_data->disabled.destroy_image_view)
- return false;
+ if (dev_data->instance_data->disabled.destroy_image_view) return false;
bool skip = false;
if (*image_view_state) {
skip |= ValidateObjectNotInUse(dev_data, *image_view_state, *obj_struct, VALIDATION_ERROR_00776);
@@ -5968,8 +5928,7 @@
VK_OBJECT *obj_struct) {
*pipeline_state = getPipelineState(dev_data, pipeline);
*obj_struct = {reinterpret_cast<uint64_t &>(pipeline), VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT};
- if (dev_data->instance_data->disabled.destroy_pipeline)
- return false;
+ if (dev_data->instance_data->disabled.destroy_pipeline) return false;
bool skip = false;
if (*pipeline_state) {
skip |= ValidateObjectNotInUse(dev_data, *pipeline_state, *obj_struct, VALIDATION_ERROR_00555);
@@ -6012,8 +5971,7 @@
VK_OBJECT *obj_struct) {
*sampler_state = getSamplerState(dev_data, sampler);
*obj_struct = {reinterpret_cast<uint64_t &>(sampler), VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT};
- if (dev_data->instance_data->disabled.destroy_sampler)
- return false;
+ if (dev_data->instance_data->disabled.destroy_sampler) return false;
bool skip = false;
if (*sampler_state) {
skip |= ValidateObjectNotInUse(dev_data, *sampler_state, *obj_struct, VALIDATION_ERROR_00837);
@@ -6024,8 +5982,7 @@
static void PostCallRecordDestroySampler(layer_data *dev_data, VkSampler sampler, SAMPLER_STATE *sampler_state,
VK_OBJECT obj_struct) {
// Any bound cmd buffers are now invalid
- if (sampler_state)
- invalidateCommandBuffers(dev_data, sampler_state->cb_bindings, obj_struct);
+ if (sampler_state) invalidateCommandBuffers(dev_data, sampler_state->cb_bindings, obj_struct);
dev_data->samplerMap.erase(sampler);
}
@@ -6059,8 +6016,7 @@
DESCRIPTOR_POOL_STATE **desc_pool_state, VK_OBJECT *obj_struct) {
*desc_pool_state = getDescriptorPoolState(dev_data, pool);
*obj_struct = {reinterpret_cast<uint64_t &>(pool), VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT};
- if (dev_data->instance_data->disabled.destroy_descriptor_pool)
- return false;
+ if (dev_data->instance_data->disabled.destroy_descriptor_pool) return false;
bool skip = false;
if (*desc_pool_state) {
skip |= ValidateObjectNotInUse(dev_data, *desc_pool_state, *obj_struct, VALIDATION_ERROR_00901);
@@ -6146,8 +6102,7 @@
}
}
- if (skip_call)
- return;
+ if (skip_call) return;
auto pPool = getCommandPoolNode(dev_data, commandPool);
for (uint32_t i = 0; i < commandBufferCount; i++) {
@@ -6185,7 +6140,6 @@
VKAPI_ATTR VkResult VKAPI_CALL CreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool) {
-
layer_data *dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
bool skip = false;
if (pCreateInfo && pCreateInfo->queryType == VK_QUERY_TYPE_PIPELINE_STATISTICS) {
@@ -6212,8 +6166,7 @@
static bool PreCallValidateDestroyCommandPool(layer_data *dev_data, VkCommandPool pool, COMMAND_POOL_NODE **cp_state) {
*cp_state = getCommandPoolNode(dev_data, pool);
- if (dev_data->instance_data->disabled.destroy_command_pool)
- return false;
+ if (dev_data->instance_data->disabled.destroy_command_pool) return false;
bool skip = false;
if (*cp_state) {
// Verify that command buffers in pool are complete (not in-flight)
@@ -6235,11 +6188,10 @@
}
for (auto framebuffer : cb_node->framebuffers) {
auto fb_state = getFramebufferState(dev_data, framebuffer);
- if (fb_state)
- fb_state->cb_bindings.erase(cb_node);
+ if (fb_state) fb_state->cb_bindings.erase(cb_node);
}
- dev_data->commandBufferMap.erase(cb); // Remove this command buffer
- delete cb_node; // delete CB info structure
+ dev_data->commandBufferMap.erase(cb); // Remove this command buffer
+ delete cb_node; // delete CB info structure
}
dev_data->commandPoolMap.erase(pool);
}
@@ -6267,8 +6219,7 @@
skip_call |= checkCommandBuffersInFlight(dev_data, pPool, "reset command pool with", VALIDATION_ERROR_00072);
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.ResetCommandPool(device, commandPool, flags);
@@ -6299,8 +6250,7 @@
}
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.ResetFences(device, fenceCount, pFences);
@@ -6335,8 +6285,7 @@
FRAMEBUFFER_STATE **framebuffer_state, VK_OBJECT *obj_struct) {
*framebuffer_state = getFramebufferState(dev_data, framebuffer);
*obj_struct = {reinterpret_cast<uint64_t &>(framebuffer), VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT};
- if (dev_data->instance_data->disabled.destroy_framebuffer)
- return false;
+ if (dev_data->instance_data->disabled.destroy_framebuffer) return false;
bool skip = false;
if (*framebuffer_state) {
skip |= ValidateObjectNotInUse(dev_data, *framebuffer_state, *obj_struct, VALIDATION_ERROR_00422);
@@ -6368,8 +6317,7 @@
VK_OBJECT *obj_struct) {
*rp_state = getRenderPassState(dev_data, render_pass);
*obj_struct = {reinterpret_cast<uint64_t &>(render_pass), VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT};
- if (dev_data->instance_data->disabled.destroy_renderpass)
- return false;
+ if (dev_data->instance_data->disabled.destroy_renderpass) return false;
bool skip = false;
if (*rp_state) {
skip |= ValidateObjectNotInUse(dev_data, *rp_state, *obj_struct, VALIDATION_ERROR_00393);
@@ -6436,8 +6384,7 @@
std::unique_lock<std::mutex> lock(global_lock);
bool skip_call = PreCallValidateCreateBufferView(dev_data, pCreateInfo);
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.CreateBufferView(device, pCreateInfo, pAllocator, pView);
if (VK_SUCCESS == result) {
lock.lock();
@@ -6516,9 +6463,10 @@
} else if (vk_format_is_depth_and_stencil(format)) {
if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
- (uint64_t)image, __LINE__, VALIDATION_ERROR_00741, "IMAGE", "%s: Depth/stencil image formats must have "
- "at least one of VK_IMAGE_ASPECT_DEPTH_BIT "
- "and VK_IMAGE_ASPECT_STENCIL_BIT set. %s",
+ (uint64_t)image, __LINE__, VALIDATION_ERROR_00741, "IMAGE",
+ "%s: Depth/stencil image formats must have "
+ "at least one of VK_IMAGE_ASPECT_DEPTH_BIT "
+ "and VK_IMAGE_ASPECT_STENCIL_BIT set. %s",
func_name, validation_error_map[VALIDATION_ERROR_00741]);
} else if ((aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != aspect_mask) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
@@ -6647,8 +6595,7 @@
std::unique_lock<std::mutex> lock(global_lock);
bool skip = PreCallValidateCreateImageView(dev_data, pCreateInfo);
lock.unlock();
- if (skip)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.CreateImageView(device, pCreateInfo, pAllocator, pView);
if (VK_SUCCESS == result) {
lock.lock();
@@ -6860,8 +6807,7 @@
}
static bool PreCallValidateCreateDescriptorSetLayout(layer_data *dev_data, const VkDescriptorSetLayoutCreateInfo *create_info) {
- if (dev_data->instance_data->disabled.create_descriptor_set_layout)
- return false;
+ if (dev_data->instance_data->disabled.create_descriptor_set_layout) return false;
return cvdescriptorset::DescriptorSetLayout::ValidateCreateInfo(dev_data->report_data, create_info);
}
@@ -6893,8 +6839,7 @@
// Note that the index argument is optional and only used by CreatePipelineLayout.
static bool validatePushConstantRange(const layer_data *dev_data, const uint32_t offset, const uint32_t size,
const char *caller_name, uint32_t index = 0) {
- if (dev_data->instance_data->disabled.push_constant_range)
- return false;
+ if (dev_data->instance_data->disabled.push_constant_range) return false;
uint32_t const maxPushConstantsSize = dev_data->phys_dev_properties.properties.limits.maxPushConstantsSize;
bool skip_call = false;
// Check that offset + size don't exceed the max.
@@ -6905,30 +6850,34 @@
if (offset >= maxPushConstantsSize) {
skip_call |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00877, "DS", "%s call has push constants index %u with offset %u that "
- "exceeds this device's maxPushConstantSize of %u. %s",
+ VALIDATION_ERROR_00877, "DS",
+ "%s call has push constants index %u with offset %u that "
+ "exceeds this device's maxPushConstantSize of %u. %s",
caller_name, index, offset, maxPushConstantsSize, validation_error_map[VALIDATION_ERROR_00877]);
}
if (size > maxPushConstantsSize - offset) {
skip_call |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00880, "DS", "%s call has push constants index %u with offset %u and size %u that "
- "exceeds this device's maxPushConstantSize of %u. %s",
+ VALIDATION_ERROR_00880, "DS",
+ "%s call has push constants index %u with offset %u and size %u that "
+ "exceeds this device's maxPushConstantSize of %u. %s",
caller_name, index, offset, size, maxPushConstantsSize, validation_error_map[VALIDATION_ERROR_00880]);
}
} else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
if (offset >= maxPushConstantsSize) {
skip_call |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00991, "DS", "%s call has push constants index %u with offset %u that "
- "exceeds this device's maxPushConstantSize of %u. %s",
+ VALIDATION_ERROR_00991, "DS",
+ "%s call has push constants index %u with offset %u that "
+ "exceeds this device's maxPushConstantSize of %u. %s",
caller_name, index, offset, maxPushConstantsSize, validation_error_map[VALIDATION_ERROR_00991]);
}
if (size > maxPushConstantsSize - offset) {
skip_call |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00992, "DS", "%s call has push constants index %u with offset %u and size %u that "
- "exceeds this device's maxPushConstantSize of %u. %s",
+ VALIDATION_ERROR_00992, "DS",
+ "%s call has push constants index %u with offset %u and size %u that "
+ "exceeds this device's maxPushConstantSize of %u. %s",
caller_name, index, offset, size, maxPushConstantsSize, validation_error_map[VALIDATION_ERROR_00992]);
}
} else {
@@ -6941,27 +6890,31 @@
if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
if (size == 0) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, VALIDATION_ERROR_00878, "DS", "%s call has push constants index %u with "
- "size %u. Size must be greater than zero. %s",
+ __LINE__, VALIDATION_ERROR_00878, "DS",
+ "%s call has push constants index %u with "
+ "size %u. Size must be greater than zero. %s",
caller_name, index, size, validation_error_map[VALIDATION_ERROR_00878]);
}
if (size & 0x3) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, VALIDATION_ERROR_00879, "DS", "%s call has push constants index %u with "
- "size %u. Size must be a multiple of 4. %s",
+ __LINE__, VALIDATION_ERROR_00879, "DS",
+ "%s call has push constants index %u with "
+ "size %u. Size must be a multiple of 4. %s",
caller_name, index, size, validation_error_map[VALIDATION_ERROR_00879]);
}
} else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
if (size == 0) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, VALIDATION_ERROR_01000, "DS", "%s call has push constants index %u with "
- "size %u. Size must be greater than zero. %s",
+ __LINE__, VALIDATION_ERROR_01000, "DS",
+ "%s call has push constants index %u with "
+ "size %u. Size must be greater than zero. %s",
caller_name, index, size, validation_error_map[VALIDATION_ERROR_01000]);
}
if (size & 0x3) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, VALIDATION_ERROR_00990, "DS", "%s call has push constants index %u with "
- "size %u. Size must be a multiple of 4. %s",
+ __LINE__, VALIDATION_ERROR_00990, "DS",
+ "%s call has push constants index %u with "
+ "size %u. Size must be a multiple of 4. %s",
caller_name, index, size, validation_error_map[VALIDATION_ERROR_00990]);
}
} else {
@@ -6973,13 +6926,15 @@
if ((offset & 0x3) != 0) {
if (0 == strcmp(caller_name, "vkCreatePipelineLayout()")) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_02521, "DS", "%s call has push constants index %u with "
- "offset %u. Offset must be a multiple of 4. %s",
+ VALIDATION_ERROR_02521, "DS",
+ "%s call has push constants index %u with "
+ "offset %u. Offset must be a multiple of 4. %s",
caller_name, index, offset, validation_error_map[VALIDATION_ERROR_02521]);
} else if (0 == strcmp(caller_name, "vkCmdPushConstants()")) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00989, "DS", "%s call has push constants with "
- "offset %u. Offset must be a multiple of 4. %s",
+ VALIDATION_ERROR_00989, "DS",
+ "%s call has push constants with "
+ "offset %u. Offset must be a multiple of 4. %s",
caller_name, offset, validation_error_map[VALIDATION_ERROR_00989]);
} else {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
@@ -7005,8 +6960,7 @@
validation_error_map[VALIDATION_ERROR_00882]);
}
}
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
// Each range has been validated. Now check for overlap between ranges (if they are good).
// There's no explicit Valid Usage language against this, so issue a warning instead of an error.
@@ -7017,11 +6971,11 @@
const uint32_t minB = pCreateInfo->pPushConstantRanges[j].offset;
const uint32_t maxB = minB + pCreateInfo->pPushConstantRanges[j].size;
if ((minA <= minB && maxA > minB) || (minB <= minA && maxB > minA)) {
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_PUSH_CONSTANTS_ERROR, "DS", "vkCreatePipelineLayout() call has push constants with "
- "overlapping ranges: %u:[%u, %u), %u:[%u, %u)",
- i, minA, maxA, j, minB, maxB);
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
+ __LINE__, DRAWSTATE_PUSH_CONSTANTS_ERROR, "DS",
+ "vkCreatePipelineLayout() call has push constants with "
+ "overlapping ranges: %u:[%u, %u), %u:[%u, %u)",
+ i, minA, maxA, j, minB, maxB);
}
}
}
@@ -7084,8 +7038,7 @@
// as well as DescriptorSetLayout ptrs used for later update.
static bool PreCallValidateAllocateDescriptorSets(layer_data *dev_data, const VkDescriptorSetAllocateInfo *pAllocateInfo,
cvdescriptorset::AllocateDescriptorSetsData *common_data) {
- if (dev_data->instance_data->disabled.allocate_descriptor_sets)
- return false;
+ if (dev_data->instance_data->disabled.allocate_descriptor_sets) return false;
// All state checks for AllocateDescriptorSets is done in single function
return cvdescriptorset::ValidateAllocateDescriptorSets(dev_data->report_data, pAllocateInfo, dev_data, common_data);
}
@@ -7106,8 +7059,7 @@
bool skip_call = PreCallValidateAllocateDescriptorSets(dev_data, pAllocateInfo, &common_data);
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
@@ -7121,8 +7073,7 @@
// Verify state before freeing DescriptorSets
static bool PreCallValidateFreeDescriptorSets(const layer_data *dev_data, VkDescriptorPool pool, uint32_t count,
const VkDescriptorSet *descriptor_sets) {
- if (dev_data->instance_data->disabled.free_descriptor_sets)
- return false;
+ if (dev_data->instance_data->disabled.free_descriptor_sets) return false;
bool skip_call = false;
// First make sure sets being destroyed are not currently in-use
for (uint32_t i = 0; i < count; ++i)
@@ -7168,8 +7119,7 @@
bool skip_call = PreCallValidateFreeDescriptorSets(dev_data, descriptorPool, count, pDescriptorSets);
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.FreeDescriptorSets(device, descriptorPool, count, pDescriptorSets);
if (VK_SUCCESS == result) {
lock.lock();
@@ -7185,8 +7135,7 @@
static bool PreCallValidateUpdateDescriptorSets(layer_data *dev_data, uint32_t descriptorWriteCount,
const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
const VkCopyDescriptorSet *pDescriptorCopies) {
- if (dev_data->instance_data->disabled.update_descriptor_sets)
- return false;
+ if (dev_data->instance_data->disabled.update_descriptor_sets) return false;
// First thing to do is perform map look-ups.
// NOTE : UpdateDescriptorSets is somewhat unique in that it's operating on a number of DescriptorSets
// so we can't just do a single map look-up up-front, but do them individually in functions below
@@ -7311,7 +7260,8 @@
VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
reinterpret_cast<uint64_t>(commandBuffer), __LINE__, VALIDATION_ERROR_00112, "DS",
"vkBeginCommandBuffer(): Secondary Command "
- "Buffer (0x%p) renderPass (0x%" PRIxLEAST64 ") is incompatible w/ framebuffer "
+ "Buffer (0x%p) renderPass (0x%" PRIxLEAST64
+ ") is incompatible w/ framebuffer "
"(0x%" PRIxLEAST64 ") w/ render pass (0x%" PRIxLEAST64 ") due to: %s. %s",
commandBuffer, reinterpret_cast<const uint64_t &>(pInfo->renderPass),
reinterpret_cast<const uint64_t &>(pInfo->framebuffer),
@@ -7349,11 +7299,12 @@
}
}
if (CB_RECORDING == cb_node->state) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__,
- VALIDATION_ERROR_00103, "DS", "vkBeginCommandBuffer(): Cannot call Begin on command buffer (0x%p"
- ") in the RECORDING state. Must first call vkEndCommandBuffer(). %s",
- commandBuffer, validation_error_map[VALIDATION_ERROR_00103]);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ (uint64_t)commandBuffer, __LINE__, VALIDATION_ERROR_00103, "DS",
+ "vkBeginCommandBuffer(): Cannot call Begin on command buffer (0x%p"
+ ") in the RECORDING state. Must first call vkEndCommandBuffer(). %s",
+ commandBuffer, validation_error_map[VALIDATION_ERROR_00103]);
} else if (CB_RECORDED == cb_node->state || (CB_INVALID == cb_node->state && CMD_END == cb_node->last_cmd)) {
VkCommandPool cmdPool = cb_node->createInfo.commandPool;
auto pPool = getCommandPoolNode(dev_data, cmdPool);
@@ -7447,8 +7398,7 @@
}
skip_call |= checkCommandBufferInFlight(dev_data, pCB, "reset", VALIDATION_ERROR_00092);
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.ResetCommandBuffer(commandBuffer, flags);
if (VK_SUCCESS == result) {
lock.lock();
@@ -7501,8 +7451,7 @@
}
}
lock.unlock();
- if (!skip)
- dev_data->dispatch_table.CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+ if (!skip) dev_data->dispatch_table.CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
}
VKAPI_ATTR void VKAPI_CALL CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount,
@@ -7517,8 +7466,7 @@
pCB->viewportMask |= ((1u << viewportCount) - 1u) << firstViewport;
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+ if (!skip_call) dev_data->dispatch_table.CmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
}
VKAPI_ATTR void VKAPI_CALL CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount,
@@ -7533,8 +7481,7 @@
pCB->scissorMask |= ((1u << scissorCount) - 1u) << firstScissor;
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+ if (!skip_call) dev_data->dispatch_table.CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
}
VKAPI_ATTR void VKAPI_CALL CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
@@ -7559,8 +7506,7 @@
}
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetLineWidth(commandBuffer, lineWidth);
+ if (!skip_call) dev_data->dispatch_table.CmdSetLineWidth(commandBuffer, lineWidth);
}
VKAPI_ATTR void VKAPI_CALL CmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp,
@@ -7590,8 +7536,7 @@
pCB->status |= CBSTATUS_BLEND_CONSTANTS_SET;
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetBlendConstants(commandBuffer, blendConstants);
+ if (!skip_call) dev_data->dispatch_table.CmdSetBlendConstants(commandBuffer, blendConstants);
}
VKAPI_ATTR void VKAPI_CALL CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) {
@@ -7605,8 +7550,7 @@
pCB->status |= CBSTATUS_DEPTH_BOUNDS_SET;
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+ if (!skip_call) dev_data->dispatch_table.CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
}
VKAPI_ATTR void VKAPI_CALL CmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask,
@@ -7621,8 +7565,7 @@
pCB->status |= CBSTATUS_STENCIL_READ_MASK_SET;
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+ if (!skip_call) dev_data->dispatch_table.CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
}
VKAPI_ATTR void VKAPI_CALL CmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) {
@@ -7636,8 +7579,7 @@
pCB->status |= CBSTATUS_STENCIL_WRITE_MASK_SET;
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+ if (!skip_call) dev_data->dispatch_table.CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
}
VKAPI_ATTR void VKAPI_CALL CmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) {
@@ -7651,8 +7593,7 @@
pCB->status |= CBSTATUS_STENCIL_REFERENCE_SET;
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetStencilReference(commandBuffer, faceMask, reference);
+ if (!skip_call) dev_data->dispatch_table.CmdSetStencilReference(commandBuffer, faceMask, reference);
}
VKAPI_ATTR void VKAPI_CALL CmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
@@ -7719,7 +7660,7 @@
"array. There must be one dynamic offset for each dynamic descriptor being bound.",
i, (uint64_t)pDescriptorSets[i], descriptor_set->GetDynamicDescriptorCount(),
(dynamicOffsetCount - totalDynamicDescriptors));
- } else { // Validate and store dynamic offsets with the set
+ } else { // Validate and store dynamic offsets with the set
// Validate Dynamic Offset Minimums
uint32_t cur_dyn_offset = totalDynamicDescriptors;
for (uint32_t d = 0; d < descriptor_set->GetTotalDescriptorCount(); d++) {
@@ -7730,8 +7671,9 @@
skip_call |= log_msg(
dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, VALIDATION_ERROR_00978,
- "DS", "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of "
- "device limit minUniformBufferOffsetAlignment 0x%" PRIxLEAST64 ". %s",
+ "DS",
+ "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of "
+ "device limit minUniformBufferOffsetAlignment 0x%" PRIxLEAST64 ". %s",
cur_dyn_offset, pDynamicOffsets[cur_dyn_offset],
dev_data->phys_dev_properties.properties.limits.minUniformBufferOffsetAlignment,
validation_error_map[VALIDATION_ERROR_00978]);
@@ -7744,8 +7686,9 @@
skip_call |= log_msg(
dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, 0, __LINE__, VALIDATION_ERROR_00978,
- "DS", "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of "
- "device limit minStorageBufferOffsetAlignment 0x%" PRIxLEAST64 ". %s",
+ "DS",
+ "vkCmdBindDescriptorSets(): pDynamicOffsets[%d] is %d but must be a multiple of "
+ "device limit minStorageBufferOffsetAlignment 0x%" PRIxLEAST64 ". %s",
cur_dyn_offset, pDynamicOffsets[cur_dyn_offset],
dev_data->phys_dev_properties.properties.limits.minStorageBufferOffsetAlignment,
validation_error_map[VALIDATION_ERROR_00978]);
@@ -7770,7 +7713,7 @@
skip_call |= ValidateCmd(dev_data, pCB, CMD_BINDDESCRIPTORSETS, "vkCmdBindDescriptorSets()");
UpdateCmdBufferLastCmd(dev_data, pCB, CMD_BINDDESCRIPTORSETS);
// For any previously bound sets, need to set them to "invalid" if they were disturbed by this update
- if (firstSet > 0) { // Check set #s below the first bound set
+ if (firstSet > 0) { // Check set #s below the first bound set
for (uint32_t i = 0; i < firstSet; ++i) {
if (pCB->lastBound[pipelineBindPoint].boundDescriptorSets[i] &&
!verify_set_layout_compatibility(dev_data, pCB->lastBound[pipelineBindPoint].boundDescriptorSets[i],
@@ -7843,15 +7786,15 @@
UpdateCmdBufferLastCmd(dev_data, cb_node, CMD_BINDINDEXBUFFER);
VkDeviceSize offset_align = 0;
switch (indexType) {
- case VK_INDEX_TYPE_UINT16:
- offset_align = 2;
- break;
- case VK_INDEX_TYPE_UINT32:
- offset_align = 4;
- break;
- default:
- // ParamChecker should catch bad enum, we'll also throw alignment error below if offset_align stays 0
- break;
+ case VK_INDEX_TYPE_UINT16:
+ offset_align = 2;
+ break;
+ case VK_INDEX_TYPE_UINT32:
+ offset_align = 4;
+ break;
+ default:
+ // ParamChecker should catch bad enum, we'll also throw alignment error below if offset_align stays 0
+ break;
}
if (!offset_align || (offset % offset_align)) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
@@ -7864,8 +7807,7 @@
assert(0);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+ if (!skip_call) dev_data->dispatch_table.CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
}
void updateResourceTracking(GLOBAL_CB_NODE *pCB, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers) {
@@ -7905,16 +7847,14 @@
skip_call |= report_error_no_cb_begin(dev_data, commandBuffer, "vkCmdBindVertexBuffer()");
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+ if (!skip_call) dev_data->dispatch_table.CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
}
// Expects global_lock to be held by caller
static void MarkStoreImagesAndBuffersAsWritten(layer_data *dev_data, GLOBAL_CB_NODE *pCB) {
for (auto imageView : pCB->updateImages) {
auto view_state = getImageViewState(dev_data, imageView);
- if (!view_state)
- continue;
+ if (!view_state) continue;
auto image_state = getImageState(dev_data, view_state->create_info.image);
assert(image_state);
@@ -8179,8 +8119,7 @@
assert(0);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+ if (!skip_call) dev_data->dispatch_table.CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
}
static bool VerifySourceImageLayout(layer_data *dev_data, GLOBAL_CB_NODE *cb_node, VkImage srcImage,
@@ -8198,11 +8137,11 @@
}
if (node.layout != srcImageLayout) {
// TODO: Improve log message in the next pass
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
- __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", "Cannot copy from an image whose source layout is %s "
- "and doesn't match the current layout %s.",
- string_VkImageLayout(srcImageLayout), string_VkImageLayout(node.layout));
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot copy from an image whose source layout is %s "
+ "and doesn't match the current layout %s.",
+ string_VkImageLayout(srcImageLayout), string_VkImageLayout(node.layout));
}
}
if (srcImageLayout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
@@ -8238,11 +8177,11 @@
continue;
}
if (node.layout != destImageLayout) {
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
- __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", "Cannot copy from an image whose dest layout is %s and "
- "doesn't match the current layout %s.",
- string_VkImageLayout(destImageLayout), string_VkImageLayout(node.layout));
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot copy from an image whose dest layout is %s and "
+ "doesn't match the current layout %s.",
+ string_VkImageLayout(destImageLayout), string_VkImageLayout(node.layout));
}
}
if (destImageLayout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
@@ -8287,8 +8226,9 @@
assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
}
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- error_code, "DS", "%s: Layout for cleared image is %s but can only be "
- "TRANSFER_DST_OPTIMAL or GENERAL. %s",
+ error_code, "DS",
+ "%s: Layout for cleared image is %s but can only be "
+ "TRANSFER_DST_OPTIMAL or GENERAL. %s",
func_name, string_VkImageLayout(dest_image_layout), validation_error_map[error_code]);
}
}
@@ -8310,12 +8250,12 @@
} else {
assert(strcmp(func_name, "vkCmdClearColorImage()") == 0);
}
- skip |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0,
- __LINE__, error_code, "DS", "%s: Cannot clear an image whose layout is %s and "
- "doesn't match the current layout %s. %s",
- func_name, string_VkImageLayout(dest_image_layout), string_VkImageLayout(node.layout),
- validation_error_map[error_code]);
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, 0, __LINE__, error_code, "DS",
+ "%s: Cannot clear an image whose layout is %s and "
+ "doesn't match the current layout %s. %s",
+ func_name, string_VkImageLayout(dest_image_layout), string_VkImageLayout(node.layout),
+ validation_error_map[error_code]);
}
}
}
@@ -8757,8 +8697,7 @@
assert(0);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+ if (!skip_call) dev_data->dispatch_table.CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
}
VKAPI_ATTR void VKAPI_CALL CmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset,
@@ -8789,8 +8728,7 @@
assert(0);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+ if (!skip_call) dev_data->dispatch_table.CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
}
// Returns true if sub_rect is entirely contained within rect
@@ -8852,9 +8790,9 @@
.pAttachments[subpass_desc->pColorAttachments[clear_desc->colorAttachment].attachment];
}
} else if (clear_desc->aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) {
- if (!subpass_desc->pDepthStencilAttachment || // Says no DS will be used in active subpass
+ if (!subpass_desc->pDepthStencilAttachment || // Says no DS will be used in active subpass
(subpass_desc->pDepthStencilAttachment->attachment ==
- VK_ATTACHMENT_UNUSED)) { // Says no DS will be used in active subpass
+ VK_ATTACHMENT_UNUSED)) { // Says no DS will be used in active subpass
skip |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
@@ -8906,8 +8844,7 @@
std::lock_guard<std::mutex> lock(global_lock);
skip = PreCallValidateCmdClearAttachments(dev_data, commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}
- if (!skip)
- dev_data->dispatch_table.CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+ if (!skip) dev_data->dispatch_table.CmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}
VKAPI_ATTR void VKAPI_CALL CmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
@@ -8939,8 +8876,7 @@
skip_call |= VerifyClearImageLayout(dev_data, cb_node, image, pRanges[i], imageLayout, "vkCmdClearColorImage()");
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+ if (!skip_call) dev_data->dispatch_table.CmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
}
VKAPI_ATTR void VKAPI_CALL CmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout,
@@ -9053,8 +8989,7 @@
pCB->eventUpdates.push_back(eventUpdate);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdSetEvent(commandBuffer, event, stageMask);
+ if (!skip_call) dev_data->dispatch_table.CmdSetEvent(commandBuffer, event, stageMask);
}
VKAPI_ATTR void VKAPI_CALL CmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
@@ -9084,8 +9019,7 @@
pCB->eventUpdates.push_back(eventUpdate);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdResetEvent(commandBuffer, event, stageMask);
+ if (!skip_call) dev_data->dispatch_table.CmdResetEvent(commandBuffer, event, stageMask);
}
static bool TransitionImageAspectLayout(layer_data *dev_data, GLOBAL_CB_NODE *pCB, const VkImageMemoryBarrier *mem_barrier,
@@ -9123,8 +9057,7 @@
for (uint32_t i = 0; i < memBarrierCount; ++i) {
auto mem_barrier = &pImgMemBarriers[i];
- if (!mem_barrier)
- continue;
+ if (!mem_barrier) continue;
// TODO: Do not iterate over every possibility - consolidate where
// possible
ResolveRemainingLevelsLayers(dev_data, &levelCount, &layerCount, mem_barrier->subresourceRange, mem_barrier->image);
@@ -9182,9 +9115,10 @@
} else {
if (!required_bit) {
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_BARRIER, "DS", "%s AccessMask %d %s must contain at least one of access bits %d "
- "%s when layout is %s, unless the app has previously added a "
- "barrier for this transition.",
+ DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s AccessMask %d %s must contain at least one of access bits %d "
+ "%s when layout is %s, unless the app has previously added a "
+ "barrier for this transition.",
type, accessMask, string_VkAccessFlags(accessMask).c_str(), optional_bits,
string_VkAccessFlags(optional_bits).c_str(), string_VkImageLayout(layout));
} else {
@@ -9195,9 +9129,10 @@
opt_bits = "and may have optional bits " + ss.str() + ' ' + string_VkAccessFlags(optional_bits);
}
skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_BARRIER, "DS", "%s AccessMask %d %s must have required access bit %d %s %s when "
- "layout is %s, unless the app has previously added a barrier for "
- "this transition.",
+ DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s AccessMask %d %s must have required access bit %d %s %s when "
+ "layout is %s, unless the app has previously added a barrier for "
+ "this transition.",
type, accessMask, string_VkAccessFlags(accessMask).c_str(), required_bit,
string_VkAccessFlags(required_bit).c_str(), opt_bits.c_str(), string_VkImageLayout(layout));
}
@@ -9209,51 +9144,52 @@
const VkImageLayout &layout, const char *type) {
bool skip_call = false;
switch (layout) {
- case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, type);
- break;
- }
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
- VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, type);
- break;
- }
- case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_WRITE_BIT, 0, type);
- break;
- }
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: {
- skip_call |= ValidateMaskBits(
- my_data, cmdBuffer, accessMask, layout, 0,
- VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, type);
- break;
- }
- case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, 0,
- VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT, type);
- break;
- }
- case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_READ_BIT, 0, type);
- break;
- }
- case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: {
- skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_MEMORY_READ_BIT, 0, type);
- break;
- }
- case VK_IMAGE_LAYOUT_UNDEFINED: {
- if (accessMask != 0) {
- // TODO: Verify against Valid Use section spec
- skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_BARRIER, "DS",
- "Additional bits in %s accessMask 0x%X %s are specified when layout is %s.", type, accessMask,
- string_VkAccessFlags(accessMask).c_str(), string_VkImageLayout(layout));
+ case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, type);
+ break;
}
- break;
- }
- case VK_IMAGE_LAYOUT_GENERAL:
- default: { break; }
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_WRITE_BIT, 0, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: {
+ skip_call |= ValidateMaskBits(
+ my_data, cmdBuffer, accessMask, layout, 0,
+ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+ type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, 0,
+ VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_TRANSFER_READ_BIT, 0, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: {
+ skip_call |= ValidateMaskBits(my_data, cmdBuffer, accessMask, layout, VK_ACCESS_MEMORY_READ_BIT, 0, type);
+ break;
+ }
+ case VK_IMAGE_LAYOUT_UNDEFINED: {
+ if (accessMask != 0) {
+ // TODO: Verify against Valid Use section spec
+ skip_call |= log_msg(my_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
+ __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
+ "Additional bits in %s accessMask 0x%X %s are specified when layout is %s.", type, accessMask,
+ string_VkAccessFlags(accessMask).c_str(), string_VkImageLayout(layout));
+ }
+ break;
+ }
+ case VK_IMAGE_LAYOUT_GENERAL:
+ default: { break; }
}
return skip_call;
}
@@ -9268,8 +9204,9 @@
if (pCB->activeRenderPass && memBarrierCount) {
if (!pCB->activeRenderPass->hasSelfDependency[pCB->activeSubpass]) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_BARRIER, "DS", "%s: Barriers cannot be set during subpass %d "
- "with no self dependency specified.",
+ DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s: Barriers cannot be set during subpass %d "
+ "with no self dependency specified.",
funcName, pCB->activeSubpass);
}
}
@@ -9283,12 +9220,13 @@
// srcQueueFamilyIndex and dstQueueFamilyIndex must both
// be VK_QUEUE_FAMILY_IGNORED
if ((src_q_f_index != VK_QUEUE_FAMILY_IGNORED) || (dst_q_f_index != VK_QUEUE_FAMILY_IGNORED)) {
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, DRAWSTATE_INVALID_QUEUE_INDEX, "DS",
- "%s: Image Barrier for image 0x%" PRIx64 " was created with sharingMode of "
- "VK_SHARING_MODE_CONCURRENT. Src and dst "
- "queueFamilyIndices must be VK_QUEUE_FAMILY_IGNORED.",
- funcName, reinterpret_cast<const uint64_t &>(mem_barrier->image));
+ skip |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_QUEUE_INDEX, "DS", "%s: Image Barrier for image 0x%" PRIx64
+ " was created with sharingMode of "
+ "VK_SHARING_MODE_CONCURRENT. Src and dst "
+ "queueFamilyIndices must be VK_QUEUE_FAMILY_IGNORED.",
+ funcName, reinterpret_cast<const uint64_t &>(mem_barrier->image));
}
} else {
// Sharing mode is VK_SHARING_MODE_EXCLUSIVE. srcQueueFamilyIndex and
@@ -9298,7 +9236,8 @@
(src_q_f_index != dst_q_f_index)) {
skip |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_QUEUE_INDEX, "DS", "%s: Image 0x%" PRIx64 " was created with sharingMode "
+ DRAWSTATE_INVALID_QUEUE_INDEX, "DS", "%s: Image 0x%" PRIx64
+ " was created with sharingMode "
"of VK_SHARING_MODE_EXCLUSIVE. If one of src- or "
"dstQueueFamilyIndex is VK_QUEUE_FAMILY_IGNORED, both "
"must be.",
@@ -9308,7 +9247,8 @@
(dst_q_f_index >= dev_data->phys_dev_properties.queue_family_properties.size()))) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
__LINE__, DRAWSTATE_INVALID_QUEUE_INDEX, "DS",
- "%s: Image 0x%" PRIx64 " was created with sharingMode "
+ "%s: Image 0x%" PRIx64
+ " was created with sharingMode "
"of VK_SHARING_MODE_EXCLUSIVE, but srcQueueFamilyIndex %d"
" or dstQueueFamilyIndex %d is greater than " PRINTF_SIZE_T_SPECIFIER
"queueFamilies crated for this device.",
@@ -9327,8 +9267,9 @@
}
if (mem_barrier->newLayout == VK_IMAGE_LAYOUT_UNDEFINED || mem_barrier->newLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_BARRIER, "DS", "%s: Image Layout cannot be transitioned to UNDEFINED or "
- "PREINITIALIZED.",
+ DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s: Image Layout cannot be transitioned to UNDEFINED or "
+ "PREINITIALIZED.",
funcName);
}
auto image_data = getImageState(dev_data, mem_barrier->image);
@@ -9360,9 +9301,10 @@
: mem_barrier->subresourceRange.layerCount;
if ((mem_barrier->subresourceRange.baseArrayLayer + layerCount) > arrayLayers) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, DRAWSTATE_INVALID_BARRIER, "DS", "%s: Subresource must have the sum of the "
- "baseArrayLayer (%d) and layerCount (%d) be less "
- "than or equal to the total number of layers (%d).",
+ __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s: Subresource must have the sum of the "
+ "baseArrayLayer (%d) and layerCount (%d) be less "
+ "than or equal to the total number of layers (%d).",
funcName, mem_barrier->subresourceRange.baseArrayLayer,
mem_barrier->subresourceRange.layerCount, arrayLayers);
}
@@ -9370,12 +9312,13 @@
? 1
: mem_barrier->subresourceRange.levelCount;
if ((mem_barrier->subresourceRange.baseMipLevel + levelCount) > mipLevels) {
- skip |= log_msg(
- dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_BARRIER, "DS", "%s: Subresource must have the sum of the baseMipLevel "
- "(%d) and levelCount (%d) be less than or equal to "
- "the total number of levels (%d).",
- funcName, mem_barrier->subresourceRange.baseMipLevel, mem_barrier->subresourceRange.levelCount, mipLevels);
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
+ __LINE__, DRAWSTATE_INVALID_BARRIER, "DS",
+ "%s: Subresource must have the sum of the baseMipLevel "
+ "(%d) and levelCount (%d) be less than or equal to "
+ "the total number of levels (%d).",
+ funcName, mem_barrier->subresourceRange.baseMipLevel, mem_barrier->subresourceRange.levelCount,
+ mipLevels);
}
}
}
@@ -9386,8 +9329,7 @@
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
DRAWSTATE_INVALID_BARRIER, "DS", "%s: Buffer Barriers cannot be used during a render pass.", funcName);
}
- if (!mem_barrier)
- continue;
+ if (!mem_barrier) continue;
// Validate buffer barrier queue family indices
if ((mem_barrier->srcQueueFamilyIndex != VK_QUEUE_FAMILY_IGNORED &&
@@ -9396,7 +9338,8 @@
mem_barrier->dstQueueFamilyIndex >= dev_data->phys_dev_properties.queue_family_properties.size())) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
DRAWSTATE_INVALID_QUEUE_INDEX, "DS",
- "%s: Buffer Barrier 0x%" PRIx64 " has QueueFamilyIndex greater "
+ "%s: Buffer Barrier 0x%" PRIx64
+ " has QueueFamilyIndex greater "
"than the number of QueueFamilies (" PRINTF_SIZE_T_SPECIFIER ") for this device.",
funcName, reinterpret_cast<const uint64_t &>(mem_barrier->buffer),
dev_data->phys_dev_properties.queue_family_properties.size());
@@ -9434,8 +9377,7 @@
for (uint32_t i = 0; i < eventCount; ++i) {
auto event = pCB->events[firstEventIndex + i];
auto queue_data = dev_data->queueMap.find(queue);
- if (queue_data == dev_data->queueMap.end())
- return false;
+ if (queue_data == dev_data->queueMap.end()) return false;
auto event_data = queue_data->second.eventToStageMap.find(event);
if (event_data != queue_data->second.eventToStageMap.end()) {
stageMask |= event_data->second;
@@ -9455,11 +9397,12 @@
// but set event can be called at any time.
if (sourceStageMask != stageMask && sourceStageMask != (stageMask | VK_PIPELINE_STAGE_HOST_BIT)) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00254, "DS", "Submitting cmdbuffer with call to VkCmdWaitEvents "
- "using srcStageMask 0x%X which must be the bitwise "
- "OR of the stageMask parameters used in calls to "
- "vkCmdSetEvent and VK_PIPELINE_STAGE_HOST_BIT if "
- "used with vkSetEvent but instead is 0x%X. %s",
+ VALIDATION_ERROR_00254, "DS",
+ "Submitting cmdbuffer with call to VkCmdWaitEvents "
+ "using srcStageMask 0x%X which must be the bitwise "
+ "OR of the stageMask parameters used in calls to "
+ "vkCmdSetEvent and VK_PIPELINE_STAGE_HOST_BIT if "
+ "used with vkSetEvent but instead is 0x%X. %s",
sourceStageMask, stageMask, validation_error_map[VALIDATION_ERROR_00254]);
}
return skip_call;
@@ -9652,8 +9595,7 @@
{reinterpret_cast<uint64_t &>(queryPool), VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT}, pCB);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdBeginQuery(commandBuffer, queryPool, slot, flags);
+ if (!skip_call) dev_data->dispatch_table.CmdBeginQuery(commandBuffer, queryPool, slot, flags);
}
VKAPI_ATTR void VKAPI_CALL CmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t slot) {
@@ -9683,8 +9625,7 @@
{reinterpret_cast<uint64_t &>(queryPool), VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT}, pCB);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdEndQuery(commandBuffer, queryPool, slot);
+ if (!skip_call) dev_data->dispatch_table.CmdEndQuery(commandBuffer, queryPool, slot);
}
VKAPI_ATTR void VKAPI_CALL CmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery,
@@ -9711,16 +9652,14 @@
{reinterpret_cast<uint64_t &>(queryPool), VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT}, pCB);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+ if (!skip_call) dev_data->dispatch_table.CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
}
bool validateQuery(VkQueue queue, GLOBAL_CB_NODE *pCB, VkQueryPool queryPool, uint32_t queryCount, uint32_t firstQuery) {
bool skip_call = false;
layer_data *dev_data = get_my_data_ptr(get_dispatch_key(pCB->commandBuffer), layer_data_map);
auto queue_data = dev_data->queueMap.find(queue);
- if (queue_data == dev_data->queueMap.end())
- return false;
+ if (queue_data == dev_data->queueMap.end()) return false;
for (uint32_t i = 0; i < queryCount; i++) {
QueryObject query = {queryPool, firstQuery + i};
auto query_data = queue_data->second.queryToStateMap.find(query);
@@ -9834,7 +9773,8 @@
// There were no ranges that matched the stageFlags.
skip_call |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00988, "DS", "vkCmdPushConstants() stageFlags = 0x%" PRIx32 " do not match "
+ VALIDATION_ERROR_00988, "DS", "vkCmdPushConstants() stageFlags = 0x%" PRIx32
+ " do not match "
"the stageFlags in any of the ranges in pipeline layout 0x%" PRIx64 ". %s",
(uint32_t)stageFlags, (uint64_t)layout, validation_error_map[VALIDATION_ERROR_00988]);
} else {
@@ -9871,17 +9811,18 @@
}
}
if (!contained_in_a_range) {
- skip_call |= log_msg(
- dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_00988, "DS", "vkCmdPushConstants() Push constant range [%d, %d) "
- "with stageFlags = 0x%" PRIx32 " "
- "not within flag-matching ranges in pipeline layout 0x%" PRIx64 ". %s",
- offset, offset + size, (uint32_t)stageFlags, (uint64_t)layout, validation_error_map[VALIDATION_ERROR_00988]);
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ VALIDATION_ERROR_00988, "DS",
+ "vkCmdPushConstants() Push constant range [%d, %d) "
+ "with stageFlags = 0x%" PRIx32
+ " "
+ "not within flag-matching ranges in pipeline layout 0x%" PRIx64 ". %s",
+ offset, offset + size, (uint32_t)stageFlags, (uint64_t)layout,
+ validation_error_map[VALIDATION_ERROR_00988]);
}
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+ if (!skip_call) dev_data->dispatch_table.CmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
}
VKAPI_ATTR void VKAPI_CALL CmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage,
@@ -9902,8 +9843,7 @@
}
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
+ if (!skip_call) dev_data->dispatch_table.CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
}
static bool MatchUsage(layer_data *dev_data, uint32_t count, const VkAttachmentReference *attachments,
@@ -10119,8 +10059,7 @@
bool skip_call = PreCallValidateCreateFramebuffer(dev_data, pCreateInfo);
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.CreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
@@ -10135,15 +10074,13 @@
static bool FindDependency(const int index, const int dependent, const std::vector<DAGNode> &subpass_to_node,
std::unordered_set<uint32_t> &processed_nodes) {
// If we have already checked this node we have not found a dependency path so return false.
- if (processed_nodes.count(index))
- return false;
+ if (processed_nodes.count(index)) return false;
processed_nodes.insert(index);
const DAGNode &node = subpass_to_node[index];
// Look for a dependency path. If one exists return true else recurse on the previous nodes.
if (std::find(node.prev.begin(), node.prev.end(), static_cast<uint32_t>(dependent)) == node.prev.end()) {
for (auto elem : node.prev) {
- if (FindDependency(elem, dependent, subpass_to_node, processed_nodes))
- return true;
+ if (FindDependency(elem, dependent, subpass_to_node, processed_nodes)) return true;
}
} else {
return true;
@@ -10156,8 +10093,7 @@
bool result = true;
// Loop through all subpasses that share the same attachment and make sure a dependency exists
for (uint32_t k = 0; k < dependent_subpasses.size(); ++k) {
- if (static_cast<uint32_t>(subpass) == dependent_subpasses[k])
- continue;
+ if (static_cast<uint32_t>(subpass) == dependent_subpasses[k]) continue;
const DAGNode &node = subpass_to_node[subpass];
// Check for a specified dependency between the two nodes. If one exists we are done.
auto prev_elem = std::find(node.prev.begin(), node.prev.end(), dependent_subpasses[k]);
@@ -10184,12 +10120,10 @@
// If this node writes to the attachment return true as next nodes need to preserve the attachment.
const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[index];
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
- if (attachment == subpass.pColorAttachments[j].attachment)
- return true;
+ if (attachment == subpass.pColorAttachments[j].attachment) return true;
}
if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
- if (attachment == subpass.pDepthStencilAttachment->attachment)
- return true;
+ if (attachment == subpass.pDepthStencilAttachment->attachment) return true;
}
bool result = false;
// Loop through previous nodes and see if any of them write to the attachment.
@@ -10216,7 +10150,8 @@
return result;
}
-template <class T> bool isRangeOverlapping(T offset1, T size1, T offset2, T size2) {
+template <class T>
+bool isRangeOverlapping(T offset1, T size1, T offset2, T size2) {
return (((offset1 + size1) > offset2) && ((offset1 + size1) < (offset2 + size2))) ||
((offset1 > offset2) && (offset1 < (offset2 + size2)));
}
@@ -10275,14 +10210,16 @@
for (auto other_attachment : overlapping_attachments[i]) {
if (!(pCreateInfo->pAttachments[attachment].flags & VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT)) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, VALIDATION_ERROR_00324, "DS", "Attachment %d aliases attachment %d but doesn't "
- "set VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT. %s",
+ __LINE__, VALIDATION_ERROR_00324, "DS",
+ "Attachment %d aliases attachment %d but doesn't "
+ "set VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT. %s",
attachment, other_attachment, validation_error_map[VALIDATION_ERROR_00324]);
}
if (!(pCreateInfo->pAttachments[other_attachment].flags & VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT)) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, VALIDATION_ERROR_00324, "DS", "Attachment %d aliases attachment %d but doesn't "
- "set VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT. %s",
+ __LINE__, VALIDATION_ERROR_00324, "DS",
+ "Attachment %d aliases attachment %d but doesn't "
+ "set VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT. %s",
other_attachment, attachment, validation_error_map[VALIDATION_ERROR_00324]);
}
}
@@ -10294,8 +10231,7 @@
attachmentIndices.clear();
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
uint32_t attachment = subpass.pInputAttachments[j].attachment;
- if (attachment == VK_ATTACHMENT_UNUSED)
- continue;
+ if (attachment == VK_ATTACHMENT_UNUSED) continue;
input_attachment_to_subpass[attachment].push_back(i);
for (auto overlapping_attachment : overlapping_attachments[attachment]) {
input_attachment_to_subpass[overlapping_attachment].push_back(i);
@@ -10303,8 +10239,7 @@
}
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
uint32_t attachment = subpass.pColorAttachments[j].attachment;
- if (attachment == VK_ATTACHMENT_UNUSED)
- continue;
+ if (attachment == VK_ATTACHMENT_UNUSED) continue;
output_attachment_to_subpass[attachment].push_back(i);
for (auto overlapping_attachment : overlapping_attachments[attachment]) {
output_attachment_to_subpass[overlapping_attachment].push_back(i);
@@ -10332,15 +10267,13 @@
// If the attachment is an input then all subpasses that output must have a dependency relationship
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
uint32_t attachment = subpass.pInputAttachments[j].attachment;
- if (attachment == VK_ATTACHMENT_UNUSED)
- continue;
+ if (attachment == VK_ATTACHMENT_UNUSED) continue;
CheckDependencyExists(dev_data, i, output_attachment_to_subpass[attachment], subpass_to_node, skip_call);
}
// If the attachment is an output then all subpasses that use the attachment must have a dependency relationship
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
uint32_t attachment = subpass.pColorAttachments[j].attachment;
- if (attachment == VK_ATTACHMENT_UNUSED)
- continue;
+ if (attachment == VK_ATTACHMENT_UNUSED) continue;
CheckDependencyExists(dev_data, i, output_attachment_to_subpass[attachment], subpass_to_node, skip_call);
CheckDependencyExists(dev_data, i, input_attachment_to_subpass[attachment], subpass_to_node, skip_call);
}
@@ -10389,26 +10322,25 @@
const VkSubpassDescription &subpass = pCreateInfo->pSubpasses[i];
for (uint32_t j = 0; j < subpass.colorAttachmentCount; ++j) {
auto attach_index = subpass.pColorAttachments[j].attachment;
- if (attach_index == VK_ATTACHMENT_UNUSED)
- continue;
+ if (attach_index == VK_ATTACHMENT_UNUSED) continue;
switch (subpass.pColorAttachments[j].layout) {
- case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
- // This is ideal.
- break;
+ case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+ // This is ideal.
+ break;
- case VK_IMAGE_LAYOUT_GENERAL:
- // May not be optimal; TODO: reconsider this warning based on other constraints?
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
- break;
+ case VK_IMAGE_LAYOUT_GENERAL:
+ // May not be optimal; TODO: reconsider this warning based on other constraints?
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for color attachment is GENERAL but should be COLOR_ATTACHMENT_OPTIMAL.");
+ break;
- default:
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for color attachment is %s but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
- string_VkImageLayout(subpass.pColorAttachments[j].layout));
+ default:
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for color attachment is %s but can only be COLOR_ATTACHMENT_OPTIMAL or GENERAL.",
+ string_VkImageLayout(subpass.pColorAttachments[j].layout));
}
if (attach_first_use[attach_index]) {
@@ -10419,26 +10351,27 @@
}
if (subpass.pDepthStencilAttachment && subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED) {
switch (subpass.pDepthStencilAttachment->layout) {
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
- // These are ideal.
- break;
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+ // These are ideal.
+ break;
- case VK_IMAGE_LAYOUT_GENERAL:
- // May not be optimal; TODO: reconsider this warning based on other constraints? GENERAL can be better than doing
- // a bunch of transitions.
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "GENERAL layout for depth attachment may not give optimal performance.");
- break;
+ case VK_IMAGE_LAYOUT_GENERAL:
+ // May not be optimal; TODO: reconsider this warning based on other constraints? GENERAL can be better than
+ // doing
+ // a bunch of transitions.
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "GENERAL layout for depth attachment may not give optimal performance.");
+ break;
- default:
- // No other layouts are acceptable
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for depth attachment is %s but can only be DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
- "DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.",
- string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
+ default:
+ // No other layouts are acceptable
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+ 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for depth attachment is %s but can only be DEPTH_STENCIL_ATTACHMENT_OPTIMAL, "
+ "DEPTH_STENCIL_READ_ONLY_OPTIMAL or GENERAL.",
+ string_VkImageLayout(subpass.pDepthStencilAttachment->layout));
}
auto attach_index = subpass.pDepthStencilAttachment->attachment;
@@ -10450,28 +10383,27 @@
}
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
auto attach_index = subpass.pInputAttachments[j].attachment;
- if (attach_index == VK_ATTACHMENT_UNUSED)
- continue;
+ if (attach_index == VK_ATTACHMENT_UNUSED) continue;
switch (subpass.pInputAttachments[j].layout) {
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
- case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
- // These are ideal.
- break;
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+ case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+ // These are ideal.
+ break;
- case VK_IMAGE_LAYOUT_GENERAL:
- // May not be optimal. TODO: reconsider this warning based on other constraints.
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
- break;
+ case VK_IMAGE_LAYOUT_GENERAL:
+ // May not be optimal. TODO: reconsider this warning based on other constraints.
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for input attachment is GENERAL but should be READ_ONLY_OPTIMAL.");
+ break;
- default:
- // No other layouts are acceptable
- skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
- "Layout for input attachment is %s but can only be READ_ONLY_OPTIMAL or GENERAL.",
- string_VkImageLayout(subpass.pInputAttachments[j].layout));
+ default:
+ // No other layouts are acceptable
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
+ __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Layout for input attachment is %s but can only be READ_ONLY_OPTIMAL or GENERAL.",
+ string_VkImageLayout(subpass.pInputAttachments[j].layout));
}
if (attach_first_use[attach_index]) {
@@ -10534,8 +10466,7 @@
spvDiagnosticDestroy(diag);
spvContextDestroy(ctx);
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult res = dev_data->dispatch_table.CreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
@@ -10636,8 +10567,9 @@
if (sample_count && !IsPowerOfTwo(sample_count)) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VkDebugReportObjectTypeEXT(0), 0, __LINE__,
- VALIDATION_ERROR_00337, "DS", "CreateRenderPass: Subpass %u attempts to render to "
- "attachments with inconsistent sample counts. %s",
+ VALIDATION_ERROR_00337, "DS",
+ "CreateRenderPass: Subpass %u attempts to render to "
+ "attachments with inconsistent sample counts. %s",
i, validation_error_map[VALIDATION_ERROR_00337]);
}
}
@@ -10723,8 +10655,9 @@
auto const &framebufferInfo = dev_data->frameBufferMap[pRenderPassBegin->framebuffer]->createInfo;
if (pRenderPassInfo->attachmentCount != framebufferInfo.attachmentCount) {
skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_RENDERPASS, "DS", "You cannot start a render pass using a framebuffer "
- "with a different number of attachments.");
+ DRAWSTATE_INVALID_RENDERPASS, "DS",
+ "You cannot start a render pass using a framebuffer "
+ "with a different number of attachments.");
}
for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
const VkImageView &image_view = framebufferInfo.pAttachments[i];
@@ -10772,12 +10705,10 @@
static void TransitionSubpassLayouts(layer_data *dev_data, GLOBAL_CB_NODE *pCB, const VkRenderPassBeginInfo *pRenderPassBegin,
const int subpass_index) {
auto renderPass = getRenderPassState(dev_data, pRenderPassBegin->renderPass);
- if (!renderPass)
- return;
+ if (!renderPass) return;
auto framebuffer = getFramebufferState(dev_data, pRenderPassBegin->framebuffer);
- if (!framebuffer)
- return;
+ if (!framebuffer) return;
auto const &subpass = renderPass->createInfo.pSubpasses[subpass_index];
for (uint32_t j = 0; j < subpass.inputAttachmentCount; ++j) {
@@ -10805,13 +10736,11 @@
static void TransitionFinalSubpassLayouts(layer_data *dev_data, GLOBAL_CB_NODE *pCB,
const VkRenderPassBeginInfo *pRenderPassBegin) {
auto renderPass = getRenderPassState(dev_data, pRenderPassBegin->renderPass);
- if (!renderPass)
- return;
+ if (!renderPass) return;
const VkRenderPassCreateInfo *pRenderPassInfo = renderPass->createInfo.ptr();
auto framebuffer = getFramebufferState(dev_data, pRenderPassBegin->framebuffer);
- if (!framebuffer)
- return;
+ if (!framebuffer) return;
for (uint32_t i = 0; i < pRenderPassInfo->attachmentCount; ++i) {
auto image_view = framebuffer->createInfo.pAttachments[i];
@@ -10842,7 +10771,8 @@
// If this is a stencil format, make sure the stencil[Load|Store]Op flag is checked, while if it is a depth/color attachment the
// [load|store]Op flag must be checked
// TODO: The memory valid flag in DEVICE_MEM_INFO should probably be split to track the validity of stencil memory separately.
-template <typename T> static bool FormatSpecificLoadAndStoreOpSettings(VkFormat format, T color_depth_op, T stencil_op, T op) {
+template <typename T>
+static bool FormatSpecificLoadAndStoreOpSettings(VkFormat format, T color_depth_op, T stencil_op, T op) {
if (color_depth_op != op && stencil_op != op) {
return false;
}
@@ -10863,7 +10793,7 @@
auto framebuffer = pRenderPassBegin ? getFramebufferState(dev_data, pRenderPassBegin->framebuffer) : nullptr;
if (cb_node) {
if (renderPass) {
- uint32_t clear_op_size = 0; // Make sure pClearValues is at least as large as last LOAD_OP_CLEAR
+ uint32_t clear_op_size = 0; // Make sure pClearValues is at least as large as last LOAD_OP_CLEAR
cb_node->activeFramebuffer = pRenderPassBegin->framebuffer;
for (uint32_t i = 0; i < renderPass->createInfo.attachmentCount; ++i) {
MT_FB_ATTACHMENT_INFO &fb_info = framebuffer->attachments[i];
@@ -10905,7 +10835,8 @@
reinterpret_cast<uint64_t &>(renderPass), __LINE__, VALIDATION_ERROR_00442, "DS",
"In vkCmdBeginRenderPass() the VkRenderPassBeginInfo struct has a clearValueCount of %u but there must "
"be at least %u entries in pClearValues array to account for the highest index attachment in renderPass "
- "0x%" PRIx64 " that uses VK_ATTACHMENT_LOAD_OP_CLEAR is %u. Note that the pClearValues array "
+ "0x%" PRIx64
+ " that uses VK_ATTACHMENT_LOAD_OP_CLEAR is %u. Note that the pClearValues array "
"is indexed by attachment number so even if some pClearValues entries between 0 and %u correspond to "
"attachments that aren't cleared they will be ignored. %s",
pRenderPassBegin->clearValueCount, clear_op_size, reinterpret_cast<uint64_t &>(renderPass), clear_op_size,
@@ -10966,8 +10897,7 @@
}
lock.unlock();
- if (skip_call)
- return;
+ if (skip_call) return;
dev_data->dispatch_table.CmdNextSubpass(commandBuffer, contents);
@@ -11022,8 +10952,7 @@
}
lock.unlock();
- if (skip_call)
- return;
+ if (skip_call) return;
dev_data->dispatch_table.CmdEndRenderPass(commandBuffer);
@@ -11039,10 +10968,10 @@
static bool logInvalidAttachmentMessage(layer_data *dev_data, VkCommandBuffer secondaryBuffer, uint32_t primaryAttach,
uint32_t secondaryAttach, const char *msg) {
return log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_02059, "DS",
- "vkCmdExecuteCommands() called w/ invalid Secondary Cmd Buffer 0x%" PRIx64 " which has a render pass "
- "that is not compatible with the Primary Cmd Buffer current render pass. "
- "Attachment %u is not compatible with %u: %s. %s",
+ VALIDATION_ERROR_02059, "DS", "vkCmdExecuteCommands() called w/ invalid Secondary Cmd Buffer 0x%" PRIx64
+ " which has a render pass "
+ "that is not compatible with the Primary Cmd Buffer current render pass. "
+ "Attachment %u is not compatible with %u: %s. %s",
reinterpret_cast<uint64_t &>(secondaryBuffer), primaryAttach, secondaryAttach, msg,
validation_error_map[VALIDATION_ERROR_02059]);
}
@@ -11208,14 +11137,14 @@
pSubCB->beginInfo.pInheritanceInfo) {
VkQueryPipelineStatisticFlags cmdBufStatistics = pSubCB->beginInfo.pInheritanceInfo->pipelineStatistics;
if ((cmdBufStatistics & queryPoolData->second.createInfo.pipelineStatistics) != cmdBufStatistics) {
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_02065, "DS", "vkCmdExecuteCommands() called w/ invalid Cmd Buffer 0x%p "
- "which has invalid active query pool 0x%" PRIx64
- ". Pipeline statistics is being queried so the command "
- "buffer must have all bits set on the queryPool. %s",
- pCB->commandBuffer, reinterpret_cast<const uint64_t &>(queryPoolData->first),
- validation_error_map[VALIDATION_ERROR_02065]);
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
+ __LINE__, VALIDATION_ERROR_02065, "DS",
+ "vkCmdExecuteCommands() called w/ invalid Cmd Buffer 0x%p "
+ "which has invalid active query pool 0x%" PRIx64
+ ". Pipeline statistics is being queried so the command "
+ "buffer must have all bits set on the queryPool. %s",
+ pCB->commandBuffer, reinterpret_cast<const uint64_t &>(queryPoolData->first),
+ validation_error_map[VALIDATION_ERROR_02065]);
}
}
activeTypes.insert(queryPoolData->second.createInfo.queryType);
@@ -11224,14 +11153,14 @@
for (auto queryObject : pSubCB->startedQueries) {
auto queryPoolData = dev_data->queryPoolMap.find(queryObject.pool);
if (queryPoolData != dev_data->queryPoolMap.end() && activeTypes.count(queryPoolData->second.createInfo.queryType)) {
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, "DS",
- "vkCmdExecuteCommands() called w/ invalid Cmd Buffer 0x%p "
- "which has invalid active query pool 0x%" PRIx64 "of type %d but a query of that type has been started on "
- "secondary Cmd Buffer 0x%p.",
- pCB->commandBuffer, reinterpret_cast<const uint64_t &>(queryPoolData->first),
- queryPoolData->second.createInfo.queryType, pSubCB->commandBuffer);
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ DRAWSTATE_INVALID_SECONDARY_COMMAND_BUFFER, "DS",
+ "vkCmdExecuteCommands() called w/ invalid Cmd Buffer 0x%p "
+ "which has invalid active query pool 0x%" PRIx64
+ "of type %d but a query of that type has been started on "
+ "secondary Cmd Buffer 0x%p.",
+ pCB->commandBuffer, reinterpret_cast<const uint64_t &>(queryPoolData->first),
+ queryPoolData->second.createInfo.queryType, pSubCB->commandBuffer);
}
}
@@ -11266,7 +11195,7 @@
"vkCmdExecuteCommands() called w/ Primary Cmd Buffer 0x%p in element %u of pCommandBuffers "
"array. All cmd buffers in pCommandBuffers array must be secondary. %s",
pCommandBuffers[i], i, validation_error_map[VALIDATION_ERROR_00153]);
- } else if (pCB->activeRenderPass) { // Secondary CB w/i RenderPass must have *CONTINUE_BIT set
+ } else if (pCB->activeRenderPass) { // Secondary CB w/i RenderPass must have *CONTINUE_BIT set
auto secondary_rp_state = getRenderPassState(dev_data, pSubCB->beginInfo.pInheritanceInfo->renderPass);
if (!(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
skip_call |= log_msg(
@@ -11307,12 +11236,12 @@
// being recorded
if (!(pSubCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT)) {
if (dev_data->globalInFlightCmdBuffers.find(pSubCB->commandBuffer) != dev_data->globalInFlightCmdBuffers.end()) {
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCB->commandBuffer), __LINE__,
- VALIDATION_ERROR_00154, "DS", "Attempt to simultaneously execute command buffer 0x%p"
- " without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set! %s",
- pCB->commandBuffer, validation_error_map[VALIDATION_ERROR_00154]);
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)(pCB->commandBuffer), __LINE__,
+ VALIDATION_ERROR_00154, "DS",
+ "Attempt to simultaneously execute command buffer 0x%p"
+ " without VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set! %s",
+ pCB->commandBuffer, validation_error_map[VALIDATION_ERROR_00154]);
}
if (pCB->beginInfo.flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT) {
// Warn that non-simultaneous secondary cmd buffer renders primary non-simultaneous
@@ -11328,13 +11257,14 @@
}
}
if (!pCB->activeQueries.empty() && !dev_data->enabled_features.inheritedQueries) {
- skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
- VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, reinterpret_cast<uint64_t>(pCommandBuffers[i]),
- __LINE__, VALIDATION_ERROR_02062, "DS", "vkCmdExecuteCommands(): Secondary Command Buffer "
- "(0x%p) cannot be submitted with a query in "
- "flight and inherited queries not "
- "supported on this device. %s",
- pCommandBuffers[i], validation_error_map[VALIDATION_ERROR_02062]);
+ skip_call |=
+ log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ reinterpret_cast<uint64_t>(pCommandBuffers[i]), __LINE__, VALIDATION_ERROR_02062, "DS",
+ "vkCmdExecuteCommands(): Secondary Command Buffer "
+ "(0x%p) cannot be submitted with a query in "
+ "flight and inherited queries not "
+ "supported on this device. %s",
+ pCommandBuffers[i], validation_error_map[VALIDATION_ERROR_02062]);
}
// Propagate layout transitions to the primary cmd buffer
for (auto ilm_entry : pSubCB->imageLayoutMap) {
@@ -11352,8 +11282,7 @@
UpdateCmdBufferLastCmd(dev_data, pCB, CMD_EXECUTECOMMANDS);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.CmdExecuteCommands(commandBuffer, commandBuffersCount, pCommandBuffers);
+ if (!skip_call) dev_data->dispatch_table.CmdExecuteCommands(commandBuffer, commandBuffersCount, pCommandBuffers);
}
// For any image objects that overlap mapped memory, verify that their layouts are PREINIT or GENERAL
@@ -11372,11 +11301,11 @@
if (FindLayouts(dev_data, VkImage(image_handle), layouts)) {
for (auto layout : layouts) {
if (layout != VK_IMAGE_LAYOUT_PREINITIALIZED && layout != VK_IMAGE_LAYOUT_GENERAL) {
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0,
- __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS", "Cannot map an image with layout %s. Only "
- "GENERAL or PREINITIALIZED are supported.",
- string_VkImageLayout(layout));
+ skip_call |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ (VkDebugReportObjectTypeEXT)0, 0, __LINE__, DRAWSTATE_INVALID_IMAGE_LAYOUT, "DS",
+ "Cannot map an image with layout %s. Only "
+ "GENERAL or PREINITIALIZED are supported.",
+ string_VkImageLayout(layout));
}
}
}
@@ -11445,13 +11374,13 @@
if (mem_info) {
if (pMemRanges[i].size == VK_WHOLE_SIZE) {
if (mem_info->mem_range.offset > pMemRanges[i].offset) {
- skip |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
- (uint64_t)pMemRanges[i].memory, __LINE__, VALIDATION_ERROR_00643, "MEM",
- "%s: Flush/Invalidate offset (" PRINTF_SIZE_T_SPECIFIER ") is less than Memory Object's offset "
- "(" PRINTF_SIZE_T_SPECIFIER "). %s",
- funcName, static_cast<size_t>(pMemRanges[i].offset),
- static_cast<size_t>(mem_info->mem_range.offset), validation_error_map[VALIDATION_ERROR_00643]);
+ skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, (uint64_t)pMemRanges[i].memory, __LINE__,
+ VALIDATION_ERROR_00643, "MEM", "%s: Flush/Invalidate offset (" PRINTF_SIZE_T_SPECIFIER
+ ") is less than Memory Object's offset "
+ "(" PRINTF_SIZE_T_SPECIFIER "). %s",
+ funcName, static_cast<size_t>(pMemRanges[i].offset),
+ static_cast<size_t>(mem_info->mem_range.offset), validation_error_map[VALIDATION_ERROR_00643]);
}
} else {
const uint64_t data_end = (mem_info->mem_range.size == VK_WHOLE_SIZE)
@@ -11672,8 +11601,7 @@
event_entry->second |= VK_PIPELINE_STAGE_HOST_BIT;
}
}
- if (!skip_call)
- result = dev_data->dispatch_table.SetEvent(device, event);
+ if (!skip_call) result = dev_data->dispatch_table.SetEvent(device, event);
return result;
}
@@ -11781,8 +11709,7 @@
lock.unlock();
- if (!skip_call)
- return dev_data->dispatch_table.QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+ if (!skip_call) return dev_data->dispatch_table.QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
return result;
}
@@ -11838,7 +11765,7 @@
reinterpret_cast<uint64_t>(dev_data->physical_device), __LINE__, DRAWSTATE_SWAPCHAIN_CREATE_BEFORE_QUERY, "DS",
"%s: surface capabilities not retrieved for this physical device", func_name))
return true;
- } else { // have valid capabilities
+ } else { // have valid capabilities
auto &capabilities = physical_device_state->surfaceCapabilities;
// Validate pCreateInfo->minImageCount against VkSurfaceCapabilitiesKHR::{min|max}ImageCount:
if (pCreateInfo->minImageCount < capabilities.minImageCount) {
@@ -12094,17 +12021,14 @@
auto surface_state = getSurfaceState(dev_data->instance_data, swapchain_data->createInfo.surface);
if (surface_state) {
- if (surface_state->swapchain == swapchain_data)
- surface_state->swapchain = nullptr;
- if (surface_state->old_swapchain == swapchain_data)
- surface_state->old_swapchain = nullptr;
+ if (surface_state->swapchain == swapchain_data) surface_state->swapchain = nullptr;
+ if (surface_state->old_swapchain == swapchain_data) surface_state->old_swapchain = nullptr;
}
dev_data->device_extensions.swapchainMap.erase(swapchain);
}
lock.unlock();
- if (!skip_call)
- dev_data->dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
+ if (!skip_call) dev_data->dispatch_table.DestroySwapchainKHR(device, swapchain, pAllocator);
}
VKAPI_ATTR VkResult VKAPI_CALL GetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pCount,
@@ -12114,8 +12038,7 @@
if (result == VK_SUCCESS && pSwapchainImages != NULL) {
// This should never happen and is checked by param checker.
- if (!pCount)
- return result;
+ if (!pCount) return result;
std::lock_guard<std::mutex> lock(global_lock);
const size_t count = *pCount;
auto swapchain_node = getSwapchainNode(dev_data, swapchain);
@@ -12225,15 +12148,16 @@
skip_call |=
log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
reinterpret_cast<uint64_t const &>(pPresentInfo->pSwapchains[i]), __LINE__,
- DRAWSTATE_SWAPCHAIN_UNSUPPORTED_QUEUE, "DS", "vkQueuePresentKHR: Presenting image without calling "
- "vkGetPhysicalDeviceSurfaceSupportKHR");
+ DRAWSTATE_SWAPCHAIN_UNSUPPORTED_QUEUE, "DS",
+ "vkQueuePresentKHR: Presenting image without calling "
+ "vkGetPhysicalDeviceSurfaceSupportKHR");
} else if (!support_it->second) {
- skip_call |=
- log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
- reinterpret_cast<uint64_t const &>(pPresentInfo->pSwapchains[i]), __LINE__, VALIDATION_ERROR_01961,
- "DS", "vkQueuePresentKHR: Presenting image on queue that cannot "
- "present to this surface. %s",
- validation_error_map[VALIDATION_ERROR_01961]);
+ skip_call |= log_msg(
+ dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+ reinterpret_cast<uint64_t const &>(pPresentInfo->pSwapchains[i]), __LINE__, VALIDATION_ERROR_01961, "DS",
+ "vkQueuePresentKHR: Presenting image on queue that cannot "
+ "present to this surface. %s",
+ validation_error_map[VALIDATION_ERROR_01961]);
}
}
}
@@ -12262,8 +12186,7 @@
// itself just as much.
auto local_result = pPresentInfo->pResults ? pPresentInfo->pResults[i] : result;
- if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR)
- continue; // this present didn't actually happen.
+ if (local_result != VK_SUCCESS && local_result != VK_SUBOPTIMAL_KHR) continue; // this present didn't actually happen.
// Mark the image as having been released to the WSI
auto swapchain_data = getSwapchainNode(dev_data, pPresentInfo->pSwapchains[i]);
@@ -12404,8 +12327,7 @@
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
VkResult result = dev_data->dispatch_table.AcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex);
@@ -12413,7 +12335,7 @@
if (result == VK_SUCCESS || result == VK_SUBOPTIMAL_KHR) {
if (pFence) {
pFence->state = FENCE_INFLIGHT;
- pFence->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
+ pFence->signaler.first = VK_NULL_HANDLE; // ANI isn't on a queue, so this can't participate in a completion proof.
}
// A successful call to AcquireNextImageKHR counts as a signal operation on semaphore
@@ -12449,7 +12371,7 @@
VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, 0, __LINE__, DEVLIMITS_MISSING_QUERY_COUNT, "DL",
"Call sequence has vkEnumeratePhysicalDevices() w/ non-NULL pPhysicalDevices. You should first "
"call vkEnumeratePhysicalDevices() w/ NULL pPhysicalDevices to query pPhysicalDeviceCount.");
- } // TODO : Could also flag a warning if re-calling this function in QUERY_DETAILS state
+ } // TODO : Could also flag a warning if re-calling this function in QUERY_DETAILS state
else if (instance_data->physical_devices_count != *pPhysicalDeviceCount) {
// Having actual count match count from app is not a requirement, so this can be a warning
skip_call |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
@@ -12466,7 +12388,7 @@
VkResult result = instance_data->dispatch_table.EnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
if (NULL == pPhysicalDevices) {
instance_data->physical_devices_count = *pPhysicalDeviceCount;
- } else if (result == VK_SUCCESS) { // Save physical devices
+ } else if (result == VK_SUCCESS) { // Save physical devices
for (uint32_t i = 0; i < *pPhysicalDeviceCount; i++) {
auto &phys_device_state = instance_data->physical_device_map[pPhysicalDevices[i]];
phys_device_state.phys_device = pPhysicalDevices[i];
@@ -12498,7 +12420,6 @@
}
// Then verify that pCount that is passed in on second call matches what was returned
if (physical_device_state->queueFamilyPropertiesCount != *pCount) {
-
// TODO: this is not a requirement of the Valid Usage section for vkGetPhysicalDeviceQueueFamilyProperties, so
// provide as warning
skip_call |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
@@ -12515,7 +12436,7 @@
instance_data->dispatch_table.GetPhysicalDeviceQueueFamilyProperties(physicalDevice, pCount, pQueueFamilyProperties);
if (!pQueueFamilyProperties) {
physical_device_state->queueFamilyPropertiesCount = *pCount;
- } else { // Save queue family properties
+ } else { // Save queue family properties
if (physical_device_state->queue_family_properties.size() < *pCount)
physical_device_state->queue_family_properties.resize(*pCount);
for (uint32_t i = 0; i < *pCount; i++) {
@@ -12575,42 +12496,42 @@
const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
return CreateSurface(instance, pCreateInfo, pAllocator, pSurface, &VkLayerInstanceDispatchTable::CreateAndroidSurfaceKHR);
}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
return CreateSurface(instance, pCreateInfo, pAllocator, pSurface, &VkLayerInstanceDispatchTable::CreateMirSurfaceKHR);
}
-#endif // VK_USE_PLATFORM_MIR_KHR
+#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
return CreateSurface(instance, pCreateInfo, pAllocator, pSurface, &VkLayerInstanceDispatchTable::CreateWaylandSurfaceKHR);
}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
return CreateSurface(instance, pCreateInfo, pAllocator, pSurface, &VkLayerInstanceDispatchTable::CreateWin32SurfaceKHR);
}
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
return CreateSurface(instance, pCreateInfo, pAllocator, pSurface, &VkLayerInstanceDispatchTable::CreateXcbSurfaceKHR);
}
-#endif // VK_USE_PLATFORM_XCB_KHR
+#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
return CreateSurface(instance, pCreateInfo, pAllocator, pSurface, &VkLayerInstanceDispatchTable::CreateXlibSurfaceKHR);
}
-#endif // VK_USE_PLATFORM_XLIB_KHR
+#endif // VK_USE_PLATFORM_XLIB_KHR
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
@@ -12662,47 +12583,44 @@
// Compare the preliminary value of *pPresentModeCount with the value this time:
auto prev_mode_count = (uint32_t)physical_device_state->present_modes.size();
switch (call_state) {
- case UNCALLED:
- skip_call |= log_msg(
- instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- reinterpret_cast<uint64_t>(physicalDevice), __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
- "vkGetPhysicalDeviceSurfacePresentModesKHR() called with non-NULL pPresentModeCount; but no prior positive "
- "value has been seen for pPresentModeCount.");
- break;
- default:
- // both query count and query details
- if (*pPresentModeCount != prev_mode_count) {
+ case UNCALLED:
skip_call |= log_msg(
instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- reinterpret_cast<uint64_t>(physicalDevice), __LINE__, DEVLIMITS_COUNT_MISMATCH, "DL",
- "vkGetPhysicalDeviceSurfacePresentModesKHR() called with *pPresentModeCount (%u) that differs from the value "
- "(%u) that was returned when pPresentModes was NULL.",
- *pPresentModeCount, prev_mode_count);
- }
- break;
+ reinterpret_cast<uint64_t>(physicalDevice), __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
+ "vkGetPhysicalDeviceSurfacePresentModesKHR() called with non-NULL pPresentModeCount; but no prior positive "
+ "value has been seen for pPresentModeCount.");
+ break;
+ default:
+ // both query count and query details
+ if (*pPresentModeCount != prev_mode_count) {
+ skip_call |= log_msg(instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+ reinterpret_cast<uint64_t>(physicalDevice), __LINE__, DEVLIMITS_COUNT_MISMATCH, "DL",
+ "vkGetPhysicalDeviceSurfacePresentModesKHR() called with *pPresentModeCount (%u) that "
+ "differs from the value "
+ "(%u) that was returned when pPresentModes was NULL.",
+ *pPresentModeCount, prev_mode_count);
+ }
+ break;
}
}
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
auto result = instance_data->dispatch_table.GetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount,
pPresentModes);
if (result == VK_SUCCESS || result == VK_INCOMPLETE) {
-
lock.lock();
if (*pPresentModeCount) {
- if (call_state < QUERY_COUNT)
- call_state = QUERY_COUNT;
+ if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
if (*pPresentModeCount > physical_device_state->present_modes.size())
physical_device_state->present_modes.resize(*pPresentModeCount);
}
if (pPresentModes) {
- if (call_state < QUERY_DETAILS)
- call_state = QUERY_DETAILS;
+ if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
for (uint32_t i = 0; i < *pPresentModeCount; i++) {
physical_device_state->present_modes[i] = pPresentModes[i];
}
@@ -12725,50 +12643,49 @@
auto prev_format_count = (uint32_t)physical_device_state->surface_formats.size();
switch (call_state) {
- case UNCALLED:
- // Since we haven't recorded a preliminary value of *pSurfaceFormatCount, that likely means that the application didn't
- // previously call this function with a NULL value of pSurfaceFormats:
- skip_call |= log_msg(
- instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- reinterpret_cast<uint64_t>(physicalDevice), __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
- "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount; but no prior positive "
- "value has been seen for pSurfaceFormats.");
- break;
- default:
- if (prev_format_count != *pSurfaceFormatCount) {
+ case UNCALLED:
+ // Since we haven't recorded a preliminary value of *pSurfaceFormatCount, that likely means that the application
+ // didn't
+ // previously call this function with a NULL value of pSurfaceFormats:
skip_call |= log_msg(
instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
- reinterpret_cast<uint64_t>(physicalDevice), __LINE__, DEVLIMITS_COUNT_MISMATCH, "DL",
- "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount, and with pSurfaceFormats set "
- "to "
- "a value (%u) that is greater than the value (%u) that was returned when pSurfaceFormatCount was NULL.",
- *pSurfaceFormatCount, prev_format_count);
- }
- break;
+ reinterpret_cast<uint64_t>(physicalDevice), __LINE__, DEVLIMITS_MUST_QUERY_COUNT, "DL",
+ "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount; but no prior positive "
+ "value has been seen for pSurfaceFormats.");
+ break;
+ default:
+ if (prev_format_count != *pSurfaceFormatCount) {
+ skip_call |= log_msg(
+ instance_data->report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT,
+ VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, reinterpret_cast<uint64_t>(physicalDevice), __LINE__,
+ DEVLIMITS_COUNT_MISMATCH, "DL",
+ "vkGetPhysicalDeviceSurfaceFormatsKHR() called with non-NULL pSurfaceFormatCount, and with pSurfaceFormats "
+ "set "
+ "to "
+ "a value (%u) that is greater than the value (%u) that was returned when pSurfaceFormatCount was NULL.",
+ *pSurfaceFormatCount, prev_format_count);
+ }
+ break;
}
}
lock.unlock();
- if (skip_call)
- return VK_ERROR_VALIDATION_FAILED_EXT;
+ if (skip_call) return VK_ERROR_VALIDATION_FAILED_EXT;
// Call down the call chain:
auto result = instance_data->dispatch_table.GetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount,
pSurfaceFormats);
if (result == VK_SUCCESS || result == VK_INCOMPLETE) {
-
lock.lock();
if (*pSurfaceFormatCount) {
- if (call_state < QUERY_COUNT)
- call_state = QUERY_COUNT;
+ if (call_state < QUERY_COUNT) call_state = QUERY_COUNT;
if (*pSurfaceFormatCount > physical_device_state->surface_formats.size())
physical_device_state->surface_formats.resize(*pSurfaceFormatCount);
}
if (pSurfaceFormats) {
- if (call_state < QUERY_DETAILS)
- call_state = QUERY_DETAILS;
+ if (call_state < QUERY_DETAILS) call_state = QUERY_DETAILS;
for (uint32_t i = 0; i < *pSurfaceFormatCount; i++) {
physical_device_state->surface_formats[i] = pSurfaceFormats[i];
}
@@ -12824,8 +12741,7 @@
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
uint32_t *pCount, VkExtensionProperties *pProperties) {
- if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
- return util_GetExtensionProperties(0, NULL, pCount, pProperties);
+ if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(0, NULL, pCount, pProperties);
assert(physicalDevice);
@@ -12845,48 +12761,38 @@
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice dev, const char *funcName) {
PFN_vkVoidFunction proc = intercept_core_device_command(funcName);
- if (proc)
- return proc;
+ if (proc) return proc;
assert(dev);
proc = intercept_khr_swapchain_command(funcName, dev);
- if (proc)
- return proc;
+ if (proc) return proc;
layer_data *dev_data = get_my_data_ptr(get_dispatch_key(dev), layer_data_map);
auto &table = dev_data->dispatch_table;
- if (!table.GetDeviceProcAddr)
- return nullptr;
+ if (!table.GetDeviceProcAddr) return nullptr;
return table.GetDeviceProcAddr(dev, funcName);
}
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
PFN_vkVoidFunction proc = intercept_core_instance_command(funcName);
- if (!proc)
- proc = intercept_core_device_command(funcName);
- if (!proc)
- proc = intercept_khr_swapchain_command(funcName, VK_NULL_HANDLE);
- if (!proc)
- proc = intercept_khr_surface_command(funcName, instance);
- if (proc)
- return proc;
+ if (!proc) proc = intercept_core_device_command(funcName);
+ if (!proc) proc = intercept_khr_swapchain_command(funcName, VK_NULL_HANDLE);
+ if (!proc) proc = intercept_khr_surface_command(funcName, instance);
+ if (proc) return proc;
assert(instance);
instance_layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), instance_layer_data_map);
proc = debug_report_get_instance_proc_addr(instance_data->report_data, funcName);
- if (proc)
- return proc;
+ if (proc) return proc;
proc = intercept_extension_instance_commands(funcName, instance);
- if (proc)
- return proc;
+ if (proc) return proc;
auto &table = instance_data->dispatch_table;
- if (!table.GetInstanceProcAddr)
- return nullptr;
+ if (!table.GetInstanceProcAddr) return nullptr;
return table.GetInstanceProcAddr(instance, funcName);
}
@@ -12896,8 +12802,7 @@
instance_layer_data *instance_data = get_my_data_ptr(get_dispatch_key(instance), instance_layer_data_map);
auto &table = instance_data->dispatch_table;
- if (!table.GetPhysicalDeviceProcAddr)
- return nullptr;
+ if (!table.GetPhysicalDeviceProcAddr) return nullptr;
return table.GetPhysicalDeviceProcAddr(instance, funcName);
}
@@ -12921,8 +12826,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_instance_commands); i++) {
- if (!strcmp(core_instance_commands[i].name, name))
- return core_instance_commands[i].proc;
+ if (!strcmp(core_instance_commands[i].name, name)) return core_instance_commands[i].proc;
}
return nullptr;
@@ -13052,8 +12956,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_device_commands); i++) {
- if (!strcmp(core_device_commands[i].name, name))
- return core_device_commands[i].proc;
+ if (!strcmp(core_device_commands[i].name, name)) return core_device_commands[i].proc;
}
return nullptr;
@@ -13074,22 +12977,18 @@
if (dev) {
dev_data = get_my_data_ptr(get_dispatch_key(dev), layer_data_map);
- if (!dev_data->device_extensions.wsi_enabled)
- return nullptr;
+ if (!dev_data->device_extensions.wsi_enabled) return nullptr;
}
for (size_t i = 0; i < ARRAY_SIZE(khr_swapchain_commands); i++) {
- if (!strcmp(khr_swapchain_commands[i].name, name))
- return khr_swapchain_commands[i].proc;
+ if (!strcmp(khr_swapchain_commands[i].name, name)) return khr_swapchain_commands[i].proc;
}
if (dev_data) {
- if (!dev_data->device_extensions.wsi_display_swapchain_enabled)
- return nullptr;
+ if (!dev_data->device_extensions.wsi_display_swapchain_enabled) return nullptr;
}
- if (!strcmp("vkCreateSharedSwapchainsKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
+ if (!strcmp("vkCreateSharedSwapchainsKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(CreateSharedSwapchainsKHR);
return nullptr;
}
@@ -13103,27 +13002,27 @@
#ifdef VK_USE_PLATFORM_ANDROID_KHR
{"vkCreateAndroidSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR),
&instance_layer_data::androidSurfaceExtensionEnabled},
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
{"vkCreateMirSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR),
&instance_layer_data::mirSurfaceExtensionEnabled},
-#endif // VK_USE_PLATFORM_MIR_KHR
+#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
{"vkCreateWaylandSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR),
&instance_layer_data::waylandSurfaceExtensionEnabled},
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
{"vkCreateWin32SurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR),
&instance_layer_data::win32SurfaceExtensionEnabled},
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
{"vkCreateXcbSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR),
&instance_layer_data::xcbSurfaceExtensionEnabled},
-#endif // VK_USE_PLATFORM_XCB_KHR
+#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
{"vkCreateXlibSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR),
&instance_layer_data::xlibSurfaceExtensionEnabled},
-#endif // VK_USE_PLATFORM_XLIB_KHR
+#endif // VK_USE_PLATFORM_XLIB_KHR
{"vkCreateDisplayPlaneSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateDisplayPlaneSurfaceKHR),
&instance_layer_data::displayExtensionEnabled},
{"vkDestroySurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR),
@@ -13145,8 +13044,7 @@
for (size_t i = 0; i < ARRAY_SIZE(khr_surface_commands); i++) {
if (!strcmp(khr_surface_commands[i].name, name)) {
- if (instance_data && !(instance_data->*(khr_surface_commands[i].enable)))
- return nullptr;
+ if (instance_data && !(instance_data->*(khr_surface_commands[i].enable))) return nullptr;
return khr_surface_commands[i].proc;
}
}
@@ -13156,7 +13054,7 @@
static PFN_vkVoidFunction intercept_extension_instance_commands(const char *name, VkInstance instance) { return NULL; }
-} // namespace core_validation
+} // namespace core_validation
// vk_layer_logging.h expects these to be defined
diff --git a/layers/core_validation.h b/layers/core_validation.h
index dc0e0ff..e6c92b2 100644
--- a/layers/core_validation.h
+++ b/layers/core_validation.h
@@ -69,23 +69,23 @@
struct CHECK_DISABLED {
bool command_buffer_state;
bool create_descriptor_set_layout;
- bool destroy_buffer_view; // Skip validation at DestroyBufferView time
- bool destroy_image_view; // Skip validation at DestroyImageView time
- bool destroy_pipeline; // Skip validation at DestroyPipeline time
- bool destroy_descriptor_pool; // Skip validation at DestroyDescriptorPool time
- bool destroy_framebuffer; // Skip validation at DestroyFramebuffer time
- bool destroy_renderpass; // Skip validation at DestroyRenderpass time
- bool destroy_image; // Skip validation at DestroyImage time
- bool destroy_sampler; // Skip validation at DestroySampler time
- bool destroy_command_pool; // Skip validation at DestroyCommandPool time
- bool destroy_event; // Skip validation at DestroyEvent time
- bool free_memory; // Skip validation at FreeMemory time
- bool object_in_use; // Skip all object in_use checking
- bool idle_descriptor_set; // Skip check to verify that descriptor set is no in-use
- bool push_constant_range; // Skip push constant range checks
- bool free_descriptor_sets; // Skip validation prior to vkFreeDescriptorSets()
- bool allocate_descriptor_sets; // Skip validation prior to vkAllocateDescriptorSets()
- bool update_descriptor_sets; // Skip validation prior to vkUpdateDescriptorSets()
+ bool destroy_buffer_view; // Skip validation at DestroyBufferView time
+ bool destroy_image_view; // Skip validation at DestroyImageView time
+ bool destroy_pipeline; // Skip validation at DestroyPipeline time
+ bool destroy_descriptor_pool; // Skip validation at DestroyDescriptorPool time
+ bool destroy_framebuffer; // Skip validation at DestroyFramebuffer time
+ bool destroy_renderpass; // Skip validation at DestroyRenderpass time
+ bool destroy_image; // Skip validation at DestroyImage time
+ bool destroy_sampler; // Skip validation at DestroySampler time
+ bool destroy_command_pool; // Skip validation at DestroyCommandPool time
+ bool destroy_event; // Skip validation at DestroyEvent time
+ bool free_memory; // Skip validation at FreeMemory time
+ bool object_in_use; // Skip all object in_use checking
+ bool idle_descriptor_set; // Skip check to verify that descriptor set is no in-use
+ bool push_constant_range; // Skip push constant range checks
+ bool free_descriptor_sets; // Skip validation prior to vkFreeDescriptorSets()
+ bool allocate_descriptor_sets; // Skip validation prior to vkAllocateDescriptorSets()
+ bool update_descriptor_sets; // Skip validation prior to vkUpdateDescriptorSets()
bool wait_for_fences;
bool get_fence_state;
bool queue_wait_idle;
@@ -140,7 +140,7 @@
};
class PHYS_DEV_PROPERTIES_NODE {
- public:
+ public:
VkPhysicalDeviceProperties properties;
std::vector<VkQueueFamilyProperties> queue_family_properties;
};
@@ -148,7 +148,7 @@
enum FENCE_STATE { FENCE_UNSIGNALED, FENCE_INFLIGHT, FENCE_RETIRED };
class FENCE_NODE {
- public:
+ public:
VkFence fence;
VkFenceCreateInfo createInfo;
std::pair<VkQueue, uint64_t> signaler;
@@ -159,36 +159,36 @@
};
class SEMAPHORE_NODE : public BASE_NODE {
- public:
+ public:
std::pair<VkQueue, uint64_t> signaler;
bool signaled;
};
class EVENT_STATE : public BASE_NODE {
- public:
+ public:
int write_in_use;
bool needsSignaled;
VkPipelineStageFlags stageMask;
};
class QUEUE_STATE {
- public:
+ public:
VkQueue queue;
uint32_t queueFamilyIndex;
std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap;
- std::unordered_map<QueryObject, bool> queryToStateMap; // 0 is unavailable, 1 is available
+ std::unordered_map<QueryObject, bool> queryToStateMap; // 0 is unavailable, 1 is available
uint64_t seq;
std::deque<CB_SUBMISSION> submissions;
};
class QUERY_POOL_NODE : public BASE_NODE {
- public:
+ public:
VkQueryPoolCreateInfo createInfo;
};
class FRAMEBUFFER_STATE : public BASE_NODE {
- public:
+ public:
VkFramebuffer framebuffer;
safe_VkFramebufferCreateInfo createInfo;
safe_VkRenderPassCreateInfo renderPassCreateInfo;
@@ -203,14 +203,14 @@
VkCommandPoolCreateFlags createFlags;
uint32_t queueFamilyIndex;
// TODO: why is this std::list?
- std::list<VkCommandBuffer> commandBuffers; // container of cmd buffers allocated from this pool
+ std::list<VkCommandBuffer> commandBuffers; // container of cmd buffers allocated from this pool
};
// Stuff from Device Limits Layer
enum CALL_STATE {
- UNCALLED, // Function has not been called
- QUERY_COUNT, // Function called once to query a count
- QUERY_DETAILS, // Function called w/ a count to query details
+ UNCALLED, // Function has not been called
+ QUERY_COUNT, // Function called once to query a count
+ QUERY_DETAILS, // Function called w/ a count to query details
};
struct PHYSICAL_DEVICE_STATE {
@@ -241,7 +241,8 @@
}
namespace std {
-template <> struct hash<GpuQueue> {
+template <>
+struct hash<GpuQueue> {
size_t operator()(GpuQueue gq) const throw() {
return hash<uint64_t>()((uint64_t)(gq.gpu)) ^ hash<uint32_t>()(gq.queue_family_index);
}
diff --git a/layers/core_validation_error_enums.h b/layers/core_validation_error_enums.h
index 4f9c230..e4dd0bb 100644
--- a/layers/core_validation_error_enums.h
+++ b/layers/core_validation_error_enums.h
@@ -189,4 +189,4 @@
DEVLIMITS_COUNT_MISMATCH,
DEVLIMITS_INVALID_QUEUE_CREATE_REQUEST,
};
-#endif // CORE_VALIDATION_ERROR_ENUMS_H_
+#endif // CORE_VALIDATION_ERROR_ENUMS_H_
diff --git a/layers/core_validation_types.h b/layers/core_validation_types.h
index d64d30f..e2b95f5 100644
--- a/layers/core_validation_types.h
+++ b/layers/core_validation_types.h
@@ -66,7 +66,7 @@
struct GLOBAL_CB_NODE;
class BASE_NODE {
- public:
+ public:
// Track when object is being used by an in-flight command buffer
std::atomic_int in_use;
// Track command buffers that this object is bound to
@@ -87,7 +87,8 @@
inline bool operator==(VK_OBJECT a, VK_OBJECT b) NOEXCEPT { return a.handle == b.handle && a.type == b.type; }
namespace std {
-template <> struct hash<VK_OBJECT> {
+template <>
+struct hash<VK_OBJECT> {
size_t operator()(VK_OBJECT obj) const NOEXCEPT { return hash<uint64_t>()(obj.handle) ^ hash<uint32_t>()(obj.type); }
};
}
@@ -112,18 +113,22 @@
struct DESCRIPTOR_POOL_STATE : BASE_NODE {
VkDescriptorPool pool;
- uint32_t maxSets; // Max descriptor sets allowed in this pool
- uint32_t availableSets; // Available descriptor sets in this pool
+ uint32_t maxSets; // Max descriptor sets allowed in this pool
+ uint32_t availableSets; // Available descriptor sets in this pool
VkDescriptorPoolCreateInfo createInfo;
- std::unordered_set<cvdescriptorset::DescriptorSet *> sets; // Collection of all sets in this pool
- std::vector<uint32_t> maxDescriptorTypeCount; // Max # of descriptors of each type in this pool
- std::vector<uint32_t> availableDescriptorTypeCount; // Available # of descriptors of each type in this pool
+ std::unordered_set<cvdescriptorset::DescriptorSet *> sets; // Collection of all sets in this pool
+ std::vector<uint32_t> maxDescriptorTypeCount; // Max # of descriptors of each type in this pool
+ std::vector<uint32_t> availableDescriptorTypeCount; // Available # of descriptors of each type in this pool
DESCRIPTOR_POOL_STATE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo *pCreateInfo)
- : pool(pool), maxSets(pCreateInfo->maxSets), availableSets(pCreateInfo->maxSets), createInfo(*pCreateInfo),
- maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0), availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0) {
- if (createInfo.poolSizeCount) { // Shadow type struct from ptr into local struct
+ : pool(pool),
+ maxSets(pCreateInfo->maxSets),
+ availableSets(pCreateInfo->maxSets),
+ createInfo(*pCreateInfo),
+ maxDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0),
+ availableDescriptorTypeCount(VK_DESCRIPTOR_TYPE_RANGE_SIZE, 0) {
+ if (createInfo.poolSizeCount) { // Shadow type struct from ptr into local struct
size_t poolSizeCountSize = createInfo.poolSizeCount * sizeof(VkDescriptorPoolSize);
createInfo.pPoolSizes = new VkDescriptorPoolSize[poolSizeCountSize];
memcpy((void *)createInfo.pPoolSizes, pCreateInfo->pPoolSizes, poolSizeCountSize);
@@ -136,7 +141,7 @@
availableDescriptorTypeCount[typeIndex] = maxDescriptorTypeCount[typeIndex];
}
} else {
- createInfo.pPoolSizes = NULL; // Make sure this is NULL so we don't try to clean it up
+ createInfo.pPoolSizes = NULL; // Make sure this is NULL so we don't try to clean it up
}
}
~DESCRIPTOR_POOL_STATE() {
@@ -156,7 +161,8 @@
inline bool operator==(MEM_BINDING a, MEM_BINDING b) NOEXCEPT { return a.mem == b.mem && a.offset == b.offset && a.size == b.size; }
namespace std {
-template <> struct hash<MEM_BINDING> {
+template <>
+struct hash<MEM_BINDING> {
size_t operator()(MEM_BINDING mb) const NOEXCEPT {
auto intermediate = hash<uint64_t>()(reinterpret_cast<uint64_t &>(mb.mem)) ^ hash<uint64_t>()(mb.offset);
return intermediate ^ hash<uint64_t>()(mb.size);
@@ -166,8 +172,8 @@
// Superclass for bindable object state (currently images and buffers)
class BINDABLE : public BASE_NODE {
- public:
- bool sparse; // Is this object being bound with sparse memory or not?
+ public:
+ bool sparse; // Is this object being bound with sparse memory or not?
// Non-sparse binding data
MEM_BINDING binding;
// Memory requirements for this BINDABLE
@@ -194,7 +200,7 @@
};
class BUFFER_STATE : public BINDABLE {
- public:
+ public:
VkBuffer buffer;
VkBufferCreateInfo createInfo;
BUFFER_STATE(VkBuffer buff, const VkBufferCreateInfo *pCreateInfo) : buffer(buff), createInfo(*pCreateInfo) {
@@ -207,7 +213,7 @@
};
class BUFFER_VIEW_STATE : public BASE_NODE {
- public:
+ public:
VkBufferView buffer_view;
VkBufferViewCreateInfo create_info;
BUFFER_VIEW_STATE(VkBufferView bv, const VkBufferViewCreateInfo *ci) : buffer_view(bv), create_info(*ci){};
@@ -222,11 +228,11 @@
};
class IMAGE_STATE : public BINDABLE {
- public:
+ public:
VkImage image;
VkImageCreateInfo createInfo;
- bool valid; // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEM_INFO
- bool acquired; // If this is a swapchain image, has it been acquired by the app.
+ bool valid; // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEM_INFO
+ bool acquired; // If this is a swapchain image, has it been acquired by the app.
IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo)
: image(img), createInfo(*pCreateInfo), valid(false), acquired(false) {
if (createInfo.flags & VK_IMAGE_CREATE_SPARSE_BINDING_BIT) {
@@ -238,7 +244,7 @@
};
class IMAGE_VIEW_STATE : public BASE_NODE {
- public:
+ public:
VkImageView image_view;
VkImageViewCreateInfo create_info;
IMAGE_VIEW_STATE(VkImageView iv, const VkImageViewCreateInfo *ci) : image_view(iv), create_info(*ci){};
@@ -252,43 +258,50 @@
struct MEMORY_RANGE {
uint64_t handle;
- bool image; // True for image, false for buffer
- bool linear; // True for buffers and linear images
- bool valid; // True if this range is know to be valid
+ bool image; // True for image, false for buffer
+ bool linear; // True for buffers and linear images
+ bool valid; // True if this range is known to be valid
VkDeviceMemory memory;
VkDeviceSize start;
VkDeviceSize size;
- VkDeviceSize end; // Store this pre-computed for simplicity
+ VkDeviceSize end; // Store this pre-computed for simplicity
// Set of ptrs to every range aliased with this one
std::unordered_set<MEMORY_RANGE *> aliases;
};
// Data struct for tracking memory object
struct DEVICE_MEM_INFO : public BASE_NODE {
- void *object; // Dispatchable object used to create this memory (device of swapchain)
- bool global_valid; // If allocation is mapped, set to "true" to be picked up by subsequently bound ranges
+ void *object; // Dispatchable object used to create this memory (device of swapchain)
+ bool global_valid; // If allocation is mapped, set to "true" to be picked up by subsequently bound ranges
VkDeviceMemory mem;
VkMemoryAllocateInfo alloc_info;
- std::unordered_set<VK_OBJECT> obj_bindings; // objects bound to this memory
- std::unordered_map<uint64_t, MEMORY_RANGE> bound_ranges; // Map of object to its binding range
+ std::unordered_set<VK_OBJECT> obj_bindings; // objects bound to this memory
+ std::unordered_map<uint64_t, MEMORY_RANGE> bound_ranges; // Map of object to its binding range
// Convenience vectors image/buff handles to speed up iterating over images or buffers independently
std::unordered_set<uint64_t> bound_images;
std::unordered_set<uint64_t> bound_buffers;
MemRange mem_range;
- void *shadow_copy_base; // Base of layer's allocation for guard band, data, and alignment space
- void *shadow_copy; // Pointer to start of guard-band data before mapped region
- uint64_t shadow_pad_size; // Size of the guard-band data before and after actual data. It MUST be a
- // multiple of limits.minMemoryMapAlignment
- void *p_driver_data; // Pointer to application's actual memory
+ void *shadow_copy_base; // Base of layer's allocation for guard band, data, and alignment space
+ void *shadow_copy; // Pointer to start of guard-band data before mapped region
+ uint64_t shadow_pad_size; // Size of the guard-band data before and after actual data. It MUST be a
+ // multiple of limits.minMemoryMapAlignment
+ void *p_driver_data; // Pointer to application's actual memory
DEVICE_MEM_INFO(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
- : object(disp_object), global_valid(false), mem(in_mem), alloc_info(*p_alloc_info), mem_range{}, shadow_copy_base(0),
- shadow_copy(0), shadow_pad_size(0), p_driver_data(0){};
+ : object(disp_object),
+ global_valid(false),
+ mem(in_mem),
+ alloc_info(*p_alloc_info),
+ mem_range{},
+ shadow_copy_base(0),
+ shadow_copy(0),
+ shadow_pad_size(0),
+ p_driver_data(0){};
};
class SWAPCHAIN_NODE {
- public:
+ public:
safe_VkSwapchainCreateInfoKHR createInfo;
VkSwapchainKHR swapchain;
std::vector<VkImage> images;
@@ -308,7 +321,7 @@
};
class IMAGE_CMD_BUF_LAYOUT_NODE {
- public:
+ public:
IMAGE_CMD_BUF_LAYOUT_NODE() = default;
IMAGE_CMD_BUF_LAYOUT_NODE(VkImageLayout initialLayoutInput, VkImageLayout layoutInput)
: initialLayout(initialLayoutInput), layout(layoutInput) {}
@@ -387,14 +400,14 @@
CMD_NEXTSUBPASS,
CMD_ENDRENDERPASS,
CMD_EXECUTECOMMANDS,
- CMD_END, // Should be last command in any RECORDED cmd buffer
+ CMD_END, // Should be last command in any RECORDED cmd buffer
};
enum CB_STATE {
- CB_NEW, // Newly created CB w/o any cmds
- CB_RECORDING, // BeginCB has been called on this CB
- CB_RECORDED, // EndCB has been called on this CB
- CB_INVALID // CB had a bound descriptor set destroyed or updated
+ CB_NEW, // Newly created CB w/o any cmds
+ CB_RECORDING, // BeginCB has been called on this CB
+ CB_RECORDED, // EndCB has been called on this CB
+ CB_INVALID // CB had a bound descriptor set destroyed or updated
};
// CB Status -- used to track status of various bindings on cmd buffer objects
@@ -424,7 +437,8 @@
}
namespace std {
-template <> struct hash<QueryObject> {
+template <>
+struct hash<QueryObject> {
size_t operator()(QueryObject query) const throw() {
return hash<uint64_t>()((uint64_t)(query.pool)) ^ hash<uint32_t>()(query.index);
}
@@ -441,15 +455,15 @@
};
inline bool operator==(const ImageSubresourcePair &img1, const ImageSubresourcePair &img2) {
- if (img1.image != img2.image || img1.hasSubresource != img2.hasSubresource)
- return false;
+ if (img1.image != img2.image || img1.hasSubresource != img2.hasSubresource) return false;
return !img1.hasSubresource ||
(img1.subresource.aspectMask == img2.subresource.aspectMask && img1.subresource.mipLevel == img2.subresource.mipLevel &&
img1.subresource.arrayLayer == img2.subresource.arrayLayer);
}
namespace std {
-template <> struct hash<ImageSubresourcePair> {
+template <>
+struct hash<ImageSubresourcePair> {
size_t operator()(ImageSubresourcePair img) const throw() {
size_t hashVal = hash<uint64_t>()(reinterpret_cast<uint64_t &>(img.image));
hashVal ^= hash<bool>()(img.hasSubresource);
@@ -479,7 +493,7 @@
};
class PIPELINE_STATE : public BASE_NODE {
- public:
+ public:
VkPipeline pipeline;
safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI;
safe_VkComputePipelineCreateInfo computePipelineCI;
@@ -492,15 +506,24 @@
std::vector<VkVertexInputBindingDescription> vertexBindingDescriptions;
std::vector<VkVertexInputAttributeDescription> vertexAttributeDescriptions;
std::vector<VkPipelineColorBlendAttachmentState> attachments;
- bool blendConstantsEnabled; // Blend constants enabled for any attachments
+ bool blendConstantsEnabled; // Blend constants enabled for any attachments
// Store RPCI b/c renderPass may be destroyed after Pipeline creation
safe_VkRenderPassCreateInfo render_pass_ci;
PIPELINE_LAYOUT_NODE pipeline_layout;
// Default constructor
PIPELINE_STATE()
- : pipeline{}, graphicsPipelineCI{}, computePipelineCI{}, active_shaders(0), duplicate_shaders(0), active_slots(),
- vertexBindingDescriptions(), vertexAttributeDescriptions(), attachments(), blendConstantsEnabled(false), render_pass_ci(),
+ : pipeline{},
+ graphicsPipelineCI{},
+ computePipelineCI{},
+ active_shaders(0),
+ duplicate_shaders(0),
+ active_slots(),
+ vertexBindingDescriptions(),
+ vertexAttributeDescriptions(),
+ attachments(),
+ blendConstantsEnabled(false),
+ render_pass_ci(),
pipeline_layout() {}
void initGraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo) {
@@ -539,12 +562,12 @@
VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
graphicsPipelineCI.initialize(&emptyGraphicsCI);
switch (computePipelineCI.stage.stage) {
- case VK_SHADER_STAGE_COMPUTE_BIT:
- this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
- break;
- default:
- // TODO : Flag error
- break;
+ case VK_SHADER_STAGE_COMPUTE_BIT:
+ this->active_shaders |= VK_SHADER_STAGE_COMPUTE_BIT;
+ break;
+ default:
+ // TODO : Flag error
+ break;
}
}
};
@@ -572,13 +595,13 @@
VkCommandBufferAllocateInfo createInfo;
VkCommandBufferBeginInfo beginInfo;
VkCommandBufferInheritanceInfo inheritanceInfo;
- VkDevice device; // device this CB belongs to
- uint64_t numCmds; // number of cmds in this CB
- uint64_t drawCount[NUM_DRAW_TYPES]; // Count of each type of draw in this CB
- CB_STATE state; // Track cmd buffer update state
- uint64_t submitCount; // Number of times CB has been submitted
- CBStatusFlags status; // Track status of various bindings on cmd buffer
- CMD_TYPE last_cmd; // Last command written to the CB
+ VkDevice device; // device this CB belongs to
+ uint64_t numCmds; // number of cmds in this CB
+ uint64_t drawCount[NUM_DRAW_TYPES]; // Count of each type of draw in this CB
+ CB_STATE state; // Track cmd buffer update state
+ uint64_t submitCount; // Number of times CB has been submitted
+ CBStatusFlags status; // Track status of various bindings on cmd buffer
+ CMD_TYPE last_cmd; // Last command written to the CB
// Currently storing "lastBound" objects on per-CB basis
// long-term may want to create caches of "lastBound" states and could have
// each individual CMD_NODE referencing its own "lastBound" state
@@ -602,7 +625,7 @@
std::vector<VkEvent> writeEventsBeforeWait;
std::vector<VkEvent> events;
std::unordered_map<QueryObject, std::unordered_set<VkEvent>> waitedEventsBeforeQueryReset;
- std::unordered_map<QueryObject, bool> queryToStateMap; // 0 is unavailable, 1 is available
+ std::unordered_map<QueryObject, bool> queryToStateMap; // 0 is unavailable, 1 is available
std::unordered_set<QueryObject> activeQueries;
std::unordered_set<QueryObject> startedQueries;
std::unordered_map<ImageSubresourcePair, IMAGE_CMD_BUF_LAYOUT_NODE> imageLayoutMap;
@@ -610,7 +633,7 @@
std::unordered_map<VkEvent, VkPipelineStageFlags> eventToStageMap;
std::vector<DRAW_DATA> drawData;
DRAW_DATA currentDrawData;
- bool vertex_buffer_used; // Track for perf warning to make sure any bound vtx buffer used
+ bool vertex_buffer_used; // Track for perf warning to make sure any bound vtx buffer used
VkCommandBuffer primaryCommandBuffer;
// Track images and buffers that are updated by this CB at the point of a draw
std::unordered_set<VkImageView> updateImages;
@@ -671,4 +694,4 @@
void AddCommandBufferBindingBufferView(const layer_data *, GLOBAL_CB_NODE *, BUFFER_VIEW_STATE *);
}
-#endif // CORE_VALIDATION_TYPES_H_
+#endif // CORE_VALIDATION_TYPES_H_
diff --git a/layers/descriptor_sets.cpp b/layers/descriptor_sets.cpp
index 6ad8dc0..f071fcd 100644
--- a/layers/descriptor_sets.cpp
+++ b/layers/descriptor_sets.cpp
@@ -37,11 +37,11 @@
for (uint32_t i = 0; i < binding_count_; ++i) {
auto binding_num = p_create_info->pBindings[i].binding;
descriptor_count_ += p_create_info->pBindings[i].descriptorCount;
- uint32_t insert_index = 0; // Track vector index where we insert element
+ uint32_t insert_index = 0; // Track vector index where we insert element
if (bindings_.empty() || binding_num > bindings_.back().binding) {
bindings_.push_back(safe_VkDescriptorSetLayoutBinding(&p_create_info->pBindings[i]));
insert_index = static_cast<uint32_t>(bindings_.size()) - 1;
- } else { // out-of-order binding number, need to insert into vector in-order
+ } else { // out-of-order binding number, need to insert into vector in-order
auto it = bindings_.begin();
// Find currently binding's spot in vector
while (binding_num > it->binding) {
@@ -99,22 +99,20 @@
// put all bindings into the given set
void cvdescriptorset::DescriptorSetLayout::FillBindingSet(std::unordered_set<uint32_t> *binding_set) const {
- for (auto binding_index_pair : binding_to_index_map_)
- binding_set->insert(binding_index_pair.first);
+ for (auto binding_index_pair : binding_to_index_map_) binding_set->insert(binding_index_pair.first);
}
-VkDescriptorSetLayoutBinding const *
-cvdescriptorset::DescriptorSetLayout::GetDescriptorSetLayoutBindingPtrFromBinding(const uint32_t binding) const {
+VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayout::GetDescriptorSetLayoutBindingPtrFromBinding(
+ const uint32_t binding) const {
const auto &bi_itr = binding_to_index_map_.find(binding);
if (bi_itr != binding_to_index_map_.end()) {
return bindings_[bi_itr->second].ptr();
}
return nullptr;
}
-VkDescriptorSetLayoutBinding const *
-cvdescriptorset::DescriptorSetLayout::GetDescriptorSetLayoutBindingPtrFromIndex(const uint32_t index) const {
- if (index >= bindings_.size())
- return nullptr;
+VkDescriptorSetLayoutBinding const *cvdescriptorset::DescriptorSetLayout::GetDescriptorSetLayoutBindingPtrFromIndex(
+ const uint32_t index) const {
+ if (index >= bindings_.size()) return nullptr;
return bindings_[index].ptr();
}
// Return descriptorCount for given binding, 0 if index is unavailable
@@ -127,8 +125,7 @@
}
// Return descriptorCount for given index, 0 if index is unavailable
uint32_t cvdescriptorset::DescriptorSetLayout::GetDescriptorCountFromIndex(const uint32_t index) const {
- if (index >= bindings_.size())
- return 0;
+ if (index >= bindings_.size()) return 0;
return bindings_[index].descriptorCount;
}
// For the given binding, return descriptorType
@@ -151,10 +148,9 @@
uint32_t global_offset = 0;
for (auto binding : bindings_) {
global_offset += binding.descriptorCount;
- if (index < global_offset)
- return binding.descriptorType;
+ if (index < global_offset) return binding.descriptorType;
}
- assert(0); // requested global index is out of bounds
+ assert(0); // requested global index is out of bounds
return VK_DESCRIPTOR_TYPE_MAX_ENUM;
}
// For the given binding, return stageFlags
@@ -206,14 +202,13 @@
// else return false and fill in error_msg will description of what causes incompatibility
bool cvdescriptorset::DescriptorSetLayout::IsCompatible(const DescriptorSetLayout *rh_ds_layout, std::string *error_msg) const {
// Trivial case
- if (layout_ == rh_ds_layout->GetDescriptorSetLayout())
- return true;
+ if (layout_ == rh_ds_layout->GetDescriptorSetLayout()) return true;
if (descriptor_count_ != rh_ds_layout->descriptor_count_) {
std::stringstream error_str;
error_str << "DescriptorSetLayout " << layout_ << " has " << descriptor_count_ << " descriptors, but DescriptorSetLayout "
<< rh_ds_layout->GetDescriptorSetLayout() << " has " << rh_ds_layout->descriptor_count_ << " descriptors.";
*error_msg = error_str.str();
- return false; // trivial fail case
+ return false; // trivial fail case
}
// Descriptor counts match so need to go through bindings one-by-one
// and verify that type and stageFlags match
@@ -250,8 +245,7 @@
}
bool cvdescriptorset::DescriptorSetLayout::IsNextBindingConsistent(const uint32_t binding) const {
- if (!binding_to_index_map_.count(binding + 1))
- return false;
+ if (!binding_to_index_map_.count(binding + 1)) return false;
auto const &bi_itr = binding_to_index_map_.find(binding);
if (bi_itr != binding_to_index_map_.end()) {
const auto &next_bi_itr = binding_to_index_map_.find(binding + 1);
@@ -288,7 +282,7 @@
binding_remaining = GetDescriptorCountFromBinding(++current_binding);
}
binding_remaining -= offset;
- while (update_count > binding_remaining) { // While our updates overstep current binding
+ while (update_count > binding_remaining) { // While our updates overstep current binding
// Verify next consecutive binding matches type, stage flags & immutable sampler use
if (!IsNextBindingConsistent(current_binding++)) {
std::stringstream error_str;
@@ -316,48 +310,48 @@
for (uint32_t i = 0; i < p_layout_->GetBindingCount(); ++i) {
auto type = p_layout_->GetTypeFromIndex(i);
switch (type) {
- case VK_DESCRIPTOR_TYPE_SAMPLER: {
- auto immut_sampler = p_layout_->GetImmutableSamplerPtrFromIndex(i);
- for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
- if (immut_sampler)
- descriptors_.emplace_back(new SamplerDescriptor(immut_sampler + di));
- else
- descriptors_.emplace_back(new SamplerDescriptor());
+ case VK_DESCRIPTOR_TYPE_SAMPLER: {
+ auto immut_sampler = p_layout_->GetImmutableSamplerPtrFromIndex(i);
+ for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
+ if (immut_sampler)
+ descriptors_.emplace_back(new SamplerDescriptor(immut_sampler + di));
+ else
+ descriptors_.emplace_back(new SamplerDescriptor());
+ }
+ break;
}
- break;
- }
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
- auto immut = p_layout_->GetImmutableSamplerPtrFromIndex(i);
- for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
- if (immut)
- descriptors_.emplace_back(new ImageSamplerDescriptor(immut + di));
- else
- descriptors_.emplace_back(new ImageSamplerDescriptor());
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+ auto immut = p_layout_->GetImmutableSamplerPtrFromIndex(i);
+ for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di) {
+ if (immut)
+ descriptors_.emplace_back(new ImageSamplerDescriptor(immut + di));
+ else
+ descriptors_.emplace_back(new ImageSamplerDescriptor());
+ }
+ break;
}
- break;
- }
- // ImageDescriptors
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
- for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
- descriptors_.emplace_back(new ImageDescriptor(type));
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
- descriptors_.emplace_back(new TexelDescriptor(type));
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
- descriptors_.emplace_back(new BufferDescriptor(type));
- break;
- default:
- assert(0); // Bad descriptor type specified
- break;
+ // ImageDescriptors
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+ descriptors_.emplace_back(new ImageDescriptor(type));
+ break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+ descriptors_.emplace_back(new TexelDescriptor(type));
+ break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+ for (uint32_t di = 0; di < p_layout_->GetDescriptorCountFromIndex(i); ++di)
+ descriptors_.emplace_back(new BufferDescriptor(type));
+ break;
+ default:
+ assert(0); // Bad descriptor type specified
+ break;
}
}
}
@@ -368,14 +362,12 @@
std::string result("");
for (unsigned i = 0; i <= VK_IMAGE_VIEW_TYPE_END_RANGE; i++) {
if (req & (1 << i)) {
- if (result.size())
- result += ", ";
+ if (result.size()) result += ", ";
result += string_VkImageViewType(VkImageViewType(i));
}
}
- if (!result.size())
- result = "(none)";
+ if (!result.size()) result = "(none)";
return result;
}
@@ -405,7 +397,7 @@
// Nothing to do for strictly immutable sampler
} else {
auto end_idx = p_layout_->GetGlobalEndIndexFromBinding(binding);
- auto array_idx = 0; // Track array idx if we're dealing with array descriptors
+ auto array_idx = 0; // Track array idx if we're dealing with array descriptors
for (uint32_t i = start_idx; i <= end_idx; ++i, ++array_idx) {
if (!descriptors_[i]->updated) {
std::stringstream error_str;
@@ -579,8 +571,7 @@
offset = 0;
binding_being_updated++;
}
- if (update->descriptorCount)
- some_update_ = true;
+ if (update->descriptorCount) some_update_ = true;
InvalidateBoundCmdBuffers();
}
@@ -670,8 +661,7 @@
}
}
// Update parameters all look good and descriptor updated so verify update contents
- if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, error_code, error_msg))
- return false;
+ if (!VerifyCopyUpdateContents(update, src_set, src_type, src_start_idx, error_code, error_msg)) return false;
// All checks passed so update is good
return true;
@@ -684,8 +674,7 @@
for (uint32_t di = 0; di < update->descriptorCount; ++di) {
descriptors_[dst_start_idx + di]->CopyUpdate(src_set->descriptors_[src_start_idx + di].get());
}
- if (update->descriptorCount)
- some_update_ = true;
+ if (update->descriptorCount) some_update_ = true;
InvalidateBoundCmdBuffers();
}
@@ -785,72 +774,72 @@
// vkCreateImageView(). What's the best way to create unique id for these cases?
bool ds = vk_format_is_depth_or_stencil(format);
switch (image_layout) {
- case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
- // Only Color bit must be set
- if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
- std::stringstream error_str;
- error_str << "ImageView (" << image_view << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does "
- "not have VK_IMAGE_ASPECT_COLOR_BIT set.";
- *error_msg = error_str.str();
- return false;
- }
- // format must NOT be DS
- if (ds) {
- std::stringstream error_str;
- error_str << "ImageView (" << image_view
- << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is "
- << string_VkFormat(format) << " which is not a color format.";
- *error_msg = error_str.str();
- return false;
- }
- break;
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
- case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
- // Depth or stencil bit must be set, but both must NOT be set
- if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
- if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
- // both must NOT be set
+ case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+ // Only Color bit must be set
+ if ((aspect_mask & VK_IMAGE_ASPECT_COLOR_BIT) != VK_IMAGE_ASPECT_COLOR_BIT) {
std::stringstream error_str;
- error_str << "ImageView (" << image_view << ") has both STENCIL and DEPTH aspects set";
+ error_str << "ImageView (" << image_view << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but does "
+ "not have VK_IMAGE_ASPECT_COLOR_BIT set.";
*error_msg = error_str.str();
return false;
}
- } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
- // Neither were set
- std::stringstream error_str;
- error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
- << " but does not have STENCIL or DEPTH aspects set";
- *error_msg = error_str.str();
- return false;
- }
- // format must be DS
- if (!ds) {
- std::stringstream error_str;
- error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
- << " but the image format is " << string_VkFormat(format) << " which is not a depth/stencil format.";
- *error_msg = error_str.str();
- return false;
- }
- break;
- default:
- // For other layouts if the source is depth/stencil image, both aspect bits must not be set
- if (ds) {
+ // format must NOT be DS
+ if (ds) {
+ std::stringstream error_str;
+ error_str << "ImageView (" << image_view
+ << ") uses layout VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL but the image format is "
+ << string_VkFormat(format) << " which is not a color format.";
+ *error_msg = error_str.str();
+ return false;
+ }
+ break;
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+ case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
+ // Depth or stencil bit must be set, but both must NOT be set
if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
// both must NOT be set
std::stringstream error_str;
- error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
- << " and is using depth/stencil image of format " << string_VkFormat(format)
- << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil "
- "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or "
- "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil "
- "reads respectively.";
+ error_str << "ImageView (" << image_view << ") has both STENCIL and DEPTH aspects set";
*error_msg = error_str.str();
return false;
}
+ } else if (!(aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT)) {
+ // Neither were set
+ std::stringstream error_str;
+ error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
+ << " but does not have STENCIL or DEPTH aspects set";
+ *error_msg = error_str.str();
+ return false;
}
- }
- break;
+ // format must be DS
+ if (!ds) {
+ std::stringstream error_str;
+ error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
+ << " but the image format is " << string_VkFormat(format) << " which is not a depth/stencil format.";
+ *error_msg = error_str.str();
+ return false;
+ }
+ break;
+ default:
+ // For other layouts if the source is depth/stencil image, both aspect bits must not be set
+ if (ds) {
+ if (aspect_mask & VK_IMAGE_ASPECT_DEPTH_BIT) {
+ if (aspect_mask & VK_IMAGE_ASPECT_STENCIL_BIT) {
+ // both must NOT be set
+ std::stringstream error_str;
+ error_str << "ImageView (" << image_view << ") has layout " << string_VkImageLayout(image_layout)
+ << " and is using depth/stencil image of format " << string_VkFormat(format)
+ << " but it has both STENCIL and DEPTH aspects set, which is illegal. When using a depth/stencil "
+ "image in a descriptor set, please only set either VK_IMAGE_ASPECT_DEPTH_BIT or "
+ "VK_IMAGE_ASPECT_STENCIL_BIT depending on whether it will be used for depth reads or stencil "
+ "reads respectively.";
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ }
+ break;
}
// Now validate that usage flags are correctly set for given type of update
// As we're switching per-type, if any type has specific layout requirements, check those here as well
@@ -860,36 +849,37 @@
// identify swizzle
std::string error_usage_bit;
switch (type) {
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
- if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) {
- error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT";
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+ if (!(usage & VK_IMAGE_USAGE_SAMPLED_BIT)) {
+ error_usage_bit = "VK_IMAGE_USAGE_SAMPLED_BIT";
+ }
+ break;
}
- break;
- }
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
- if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) {
- error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT";
- } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
- std::stringstream error_str;
- // TODO : Need to create custom enum error code for this case
- error_str << "ImageView (" << image_view << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
- << string_VkImageLayout(image_layout)
- << " but according to spec section 13.1 Descriptor Types, 'Load and store operations on storage images can "
- "only be done on images in VK_IMAGE_LAYOUT_GENERAL layout.'";
- *error_msg = error_str.str();
- return false;
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+ if (!(usage & VK_IMAGE_USAGE_STORAGE_BIT)) {
+ error_usage_bit = "VK_IMAGE_USAGE_STORAGE_BIT";
+ } else if (VK_IMAGE_LAYOUT_GENERAL != image_layout) {
+ std::stringstream error_str;
+ // TODO : Need to create custom enum error code for this case
+ error_str
+ << "ImageView (" << image_view << ") of VK_DESCRIPTOR_TYPE_STORAGE_IMAGE type is being updated with layout "
+ << string_VkImageLayout(image_layout)
+ << " but according to spec section 13.1 Descriptor Types, 'Load and store operations on storage images can "
+ "only be done on images in VK_IMAGE_LAYOUT_GENERAL layout.'";
+ *error_msg = error_str.str();
+ return false;
+ }
+ break;
}
- break;
- }
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
- if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) {
- error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
+ if (!(usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) {
+ error_usage_bit = "VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT";
+ }
+ break;
}
- break;
- }
- default:
- break;
+ default:
+ break;
}
if (!error_usage_bit.empty()) {
std::stringstream error_str;
@@ -918,8 +908,7 @@
void cvdescriptorset::SamplerDescriptor::BindCommandBuffer(const core_validation::layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
if (!immutable_) {
auto sampler_state = getSamplerState(dev_data, sampler_);
- if (sampler_state)
- core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
+ if (sampler_state) core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
}
}
@@ -965,8 +954,7 @@
// First add binding for any non-immutable sampler
if (!immutable_) {
auto sampler_state = getSamplerState(dev_data, sampler_);
- if (sampler_state)
- core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
+ if (sampler_state) core_validation::AddCommandBufferBindingSampler(cb_node, sampler_state);
}
// Add binding for image
auto iv_state = getImageViewState(dev_data, image_view_);
@@ -979,8 +967,7 @@
: storage_(false), image_view_(VK_NULL_HANDLE), image_layout_(VK_IMAGE_LAYOUT_UNDEFINED) {
updated = false;
descriptor_class = Image;
- if (VK_DESCRIPTOR_TYPE_STORAGE_IMAGE == type)
- storage_ = true;
+ if (VK_DESCRIPTOR_TYPE_STORAGE_IMAGE == type) storage_ = true;
};
void cvdescriptorset::ImageDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
@@ -1037,15 +1024,13 @@
void cvdescriptorset::BufferDescriptor::BindCommandBuffer(const core_validation::layer_data *dev_data, GLOBAL_CB_NODE *cb_node) {
auto buffer_node = getBufferState(dev_data, buffer_);
- if (buffer_node)
- core_validation::AddCommandBufferBindingBuffer(dev_data, cb_node, buffer_node);
+ if (buffer_node) core_validation::AddCommandBufferBindingBuffer(dev_data, cb_node, buffer_node);
}
cvdescriptorset::TexelDescriptor::TexelDescriptor(const VkDescriptorType type) : buffer_view_(VK_NULL_HANDLE), storage_(false) {
updated = false;
descriptor_class = TexelBuffer;
- if (VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER == type)
- storage_ = true;
+ if (VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER == type) storage_ = true;
};
void cvdescriptorset::TexelDescriptor::WriteUpdate(const VkWriteDescriptorSet *update, const uint32_t index) {
@@ -1227,34 +1212,34 @@
auto usage = buffer_node->createInfo.usage;
std::string error_usage_bit;
switch (type) {
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) {
- *error_code = VALIDATION_ERROR_00950;
- error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
- }
- break;
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) {
- *error_code = VALIDATION_ERROR_00951;
- error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
- }
- break;
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) {
- *error_code = VALIDATION_ERROR_00946;
- error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
- }
- break;
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
- if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) {
- *error_code = VALIDATION_ERROR_00947;
- error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
- }
- break;
- default:
- break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ if (!(usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT)) {
+ *error_code = VALIDATION_ERROR_00950;
+ error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT";
+ }
+ break;
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ if (!(usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT)) {
+ *error_code = VALIDATION_ERROR_00951;
+ error_usage_bit = "VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT";
+ }
+ break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ if (!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) {
+ *error_code = VALIDATION_ERROR_00946;
+ error_usage_bit = "VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT";
+ }
+ break;
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+ if (!(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)) {
+ *error_code = VALIDATION_ERROR_00947;
+ error_usage_bit = "VK_BUFFER_USAGE_STORAGE_BUFFER_BIT";
+ }
+ break;
+ default:
+ break;
}
if (!error_usage_bit.empty()) {
std::stringstream error_str;
@@ -1326,91 +1311,92 @@
UNIQUE_VALIDATION_ERROR_CODE *error_code,
std::string *error_msg) const {
switch (update->descriptorType) {
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- // Validate image
- auto image_view = update->pImageInfo[di].imageView;
- auto image_layout = update->pImageInfo[di].imageLayout;
- if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to combined image sampler descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
- }
- // Intentional fall-through to validate sampler
- }
- case VK_DESCRIPTOR_TYPE_SAMPLER: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- if (!descriptors_[index + di].get()->IsImmutableSampler()) {
- if (!ValidateSampler(update->pImageInfo[di].sampler, device_data_)) {
- *error_code = VALIDATION_ERROR_00942;
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ // Validate image
+ auto image_view = update->pImageInfo[di].imageView;
+ auto image_layout = update->pImageInfo[di].imageLayout;
+ if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
std::stringstream error_str;
- error_str << "Attempted write update to sampler descriptor with invalid sampler: "
- << update->pImageInfo[di].sampler << ".";
+ error_str << "Attempted write update to combined image sampler descriptor failed due to: "
+ << error_msg->c_str();
*error_msg = error_str.str();
return false;
}
- } else {
- // TODO : Warn here
}
+ // Intentional fall-through to validate sampler
}
- break;
- }
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto image_view = update->pImageInfo[di].imageView;
- auto image_layout = update->pImageInfo[di].imageLayout;
- if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
+ case VK_DESCRIPTOR_TYPE_SAMPLER: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ if (!descriptors_[index + di].get()->IsImmutableSampler()) {
+ if (!ValidateSampler(update->pImageInfo[di].sampler, device_data_)) {
+ *error_code = VALIDATION_ERROR_00942;
+ std::stringstream error_str;
+ error_str << "Attempted write update to sampler descriptor with invalid sampler: "
+ << update->pImageInfo[di].sampler << ".";
+ *error_msg = error_str.str();
+ return false;
+ }
+ } else {
+ // TODO : Warn here
+ }
}
+ break;
}
- break;
- }
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto buffer_view = update->pTexelBufferView[di];
- auto bv_state = getBufferViewState(device_data_, buffer_view);
- if (!bv_state) {
- *error_code = VALIDATION_ERROR_00940;
- std::stringstream error_str;
- error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view;
- *error_msg = error_str.str();
- return false;
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto image_view = update->pImageInfo[di].imageView;
+ auto image_layout = update->pImageInfo[di].imageLayout;
+ if (!ValidateImageUpdate(image_view, image_layout, update->descriptorType, device_data_, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted write update to image descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
}
- auto buffer = bv_state->create_info.buffer;
- if (!ValidateBufferUsage(getBufferState(device_data_, buffer), update->descriptorType, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
+ break;
}
- break;
- }
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto buffer_view = update->pTexelBufferView[di];
+ auto bv_state = getBufferViewState(device_data_, buffer_view);
+ if (!bv_state) {
+ *error_code = VALIDATION_ERROR_00940;
+ std::stringstream error_str;
+ error_str << "Attempted write update to texel buffer descriptor with invalid buffer view: " << buffer_view;
+ *error_msg = error_str.str();
+ return false;
+ }
+ auto buffer = bv_state->create_info.buffer;
+ if (!ValidateBufferUsage(getBufferState(device_data_, buffer), update->descriptorType, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted write update to texel buffer descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
}
+ break;
}
- break;
- }
- default:
- assert(0); // We've already verified update type so should never get here
- break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ if (!ValidateBufferUpdate(update->pBufferInfo + di, update->descriptorType, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted write update to buffer descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
+ }
+ break;
+ }
+ default:
+ assert(0); // We've already verified update type so should never get here
+ break;
}
// All checks passed so update contents are good
return true;
@@ -1423,101 +1409,101 @@
// Note : Repurposing some Write update error codes here as specific details aren't called out for copy updates like they are
// for write updates
switch (src_set->descriptors_[index]->descriptor_class) {
- case PlainSampler: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- if (!src_set->descriptors_[index + di]->IsImmutableSampler()) {
- auto update_sampler = static_cast<SamplerDescriptor *>(src_set->descriptors_[index + di].get())->GetSampler();
- if (!ValidateSampler(update_sampler, device_data_)) {
- *error_code = VALIDATION_ERROR_00942;
+ case PlainSampler: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ if (!src_set->descriptors_[index + di]->IsImmutableSampler()) {
+ auto update_sampler = static_cast<SamplerDescriptor *>(src_set->descriptors_[index + di].get())->GetSampler();
+ if (!ValidateSampler(update_sampler, device_data_)) {
+ *error_code = VALIDATION_ERROR_00942;
+ std::stringstream error_str;
+ error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
+ *error_msg = error_str.str();
+ return false;
+ }
+ } else {
+ // TODO : Warn here
+ }
+ }
+ break;
+ }
+ case ImageSampler: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_set->descriptors_[index + di].get());
+ // First validate sampler
+ if (!img_samp_desc->IsImmutableSampler()) {
+ auto update_sampler = img_samp_desc->GetSampler();
+ if (!ValidateSampler(update_sampler, device_data_)) {
+ *error_code = VALIDATION_ERROR_00942;
+ std::stringstream error_str;
+ error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
+ *error_msg = error_str.str();
+ return false;
+ }
+ } else {
+ // TODO : Warn here
+ }
+ // Validate image
+ auto image_view = img_samp_desc->GetImageView();
+ auto image_layout = img_samp_desc->GetImageLayout();
+ if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
std::stringstream error_str;
- error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
+ error_str << "Attempted copy update to combined image sampler descriptor failed due to: " << error_msg->c_str();
*error_msg = error_str.str();
return false;
}
- } else {
- // TODO : Warn here
}
+ break;
}
- break;
- }
- case ImageSampler: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto img_samp_desc = static_cast<const ImageSamplerDescriptor *>(src_set->descriptors_[index + di].get());
- // First validate sampler
- if (!img_samp_desc->IsImmutableSampler()) {
- auto update_sampler = img_samp_desc->GetSampler();
- if (!ValidateSampler(update_sampler, device_data_)) {
- *error_code = VALIDATION_ERROR_00942;
+ case Image: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto img_desc = static_cast<const ImageDescriptor *>(src_set->descriptors_[index + di].get());
+ auto image_view = img_desc->GetImageView();
+ auto image_layout = img_desc->GetImageLayout();
+ if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
std::stringstream error_str;
- error_str << "Attempted copy update to sampler descriptor with invalid sampler: " << update_sampler << ".";
+ error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str();
*error_msg = error_str.str();
return false;
}
- } else {
- // TODO : Warn here
}
- // Validate image
- auto image_view = img_samp_desc->GetImageView();
- auto image_layout = img_samp_desc->GetImageLayout();
- if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted copy update to combined image sampler descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
+ break;
}
- break;
- }
- case Image: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto img_desc = static_cast<const ImageDescriptor *>(src_set->descriptors_[index + di].get());
- auto image_view = img_desc->GetImageView();
- auto image_layout = img_desc->GetImageLayout();
- if (!ValidateImageUpdate(image_view, image_layout, type, device_data_, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted copy update to image descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
+ case TexelBuffer: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto buffer_view = static_cast<TexelDescriptor *>(src_set->descriptors_[index + di].get())->GetBufferView();
+ auto bv_state = getBufferViewState(device_data_, buffer_view);
+ if (!bv_state) {
+ *error_code = VALIDATION_ERROR_00940;
+ std::stringstream error_str;
+ error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " << buffer_view;
+ *error_msg = error_str.str();
+ return false;
+ }
+ auto buffer = bv_state->create_info.buffer;
+ if (!ValidateBufferUsage(getBufferState(device_data_, buffer), type, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
}
+ break;
}
- break;
- }
- case TexelBuffer: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto buffer_view = static_cast<TexelDescriptor *>(src_set->descriptors_[index + di].get())->GetBufferView();
- auto bv_state = getBufferViewState(device_data_, buffer_view);
- if (!bv_state) {
- *error_code = VALIDATION_ERROR_00940;
- std::stringstream error_str;
- error_str << "Attempted copy update to texel buffer descriptor with invalid buffer view: " << buffer_view;
- *error_msg = error_str.str();
- return false;
+ case GeneralBuffer: {
+ for (uint32_t di = 0; di < update->descriptorCount; ++di) {
+ auto buffer = static_cast<BufferDescriptor *>(src_set->descriptors_[index + di].get())->GetBuffer();
+ if (!ValidateBufferUsage(getBufferState(device_data_, buffer), type, error_code, error_msg)) {
+ std::stringstream error_str;
+ error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str();
+ *error_msg = error_str.str();
+ return false;
+ }
}
- auto buffer = bv_state->create_info.buffer;
- if (!ValidateBufferUsage(getBufferState(device_data_, buffer), type, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted copy update to texel buffer descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
+ break;
}
- break;
- }
- case GeneralBuffer: {
- for (uint32_t di = 0; di < update->descriptorCount; ++di) {
- auto buffer = static_cast<BufferDescriptor *>(src_set->descriptors_[index + di].get())->GetBuffer();
- if (!ValidateBufferUsage(getBufferState(device_data_, buffer), type, error_code, error_msg)) {
- std::stringstream error_str;
- error_str << "Attempted copy update to buffer descriptor failed due to: " << error_msg->c_str();
- *error_msg = error_str.str();
- return false;
- }
- }
- break;
- }
- default:
- assert(0); // We've already verified update type so should never get here
- break;
+ default:
+ assert(0); // We've already verified update type so should never get here
+ break;
}
// All checks passed so update contents are good
return true;
diff --git a/layers/descriptor_sets.h b/layers/descriptor_sets.h
index 055985a..c51b373 100644
--- a/layers/descriptor_sets.h
+++ b/layers/descriptor_sets.h
@@ -90,7 +90,7 @@
*/
namespace cvdescriptorset {
class DescriptorSetLayout {
- public:
+ public:
// Constructors and destructor
DescriptorSetLayout(const VkDescriptorSetLayoutCreateInfo *p_create_info, const VkDescriptorSetLayout layout);
// Validate create info - should be called prior to creation
@@ -126,7 +126,7 @@
int32_t GetDynamicOffsetIndexFromBinding(uint32_t binding) const {
auto dyn_off = binding_to_dynamic_array_idx_map_.find(binding);
if (dyn_off == binding_to_dynamic_array_idx_map_.end()) {
- assert(0); // Requesting dyn offset for invalid binding/array idx pair
+ assert(0); // Requesting dyn offset for invalid binding/array idx pair
return -1;
}
return dyn_off->second;
@@ -139,7 +139,7 @@
// updated, verify that for any binding boundaries crossed, the update is consistent
bool VerifyUpdateConsistency(uint32_t, uint32_t, uint32_t, const char *, const VkDescriptorSet, std::string *) const;
- private:
+ private:
VkDescriptorSetLayout layout_;
std::map<uint32_t, uint32_t> binding_to_index_map_;
std::unordered_map<uint32_t, uint32_t> binding_to_global_start_index_map_;
@@ -147,9 +147,9 @@
// For a given binding map to associated index in the dynamic offset array
std::unordered_map<uint32_t, uint32_t> binding_to_dynamic_array_idx_map_;
// VkDescriptorSetLayoutCreateFlags flags_;
- uint32_t binding_count_; // # of bindings in this layout
+ uint32_t binding_count_; // # of bindings in this layout
std::vector<safe_VkDescriptorSetLayoutBinding> bindings_;
- uint32_t descriptor_count_; // total # descriptors in this layout
+ uint32_t descriptor_count_; // total # descriptors in this layout
uint32_t dynamic_descriptor_count_;
};
@@ -164,7 +164,7 @@
enum DescriptorClass { PlainSampler, ImageSampler, Image, TexelBuffer, GeneralBuffer };
class Descriptor {
- public:
+ public:
virtual ~Descriptor(){};
virtual void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) = 0;
virtual void CopyUpdate(const Descriptor *) = 0;
@@ -177,7 +177,7 @@
virtual bool IsDynamic() const { return false; };
// Check for storage descriptor type
virtual bool IsStorage() const { return false; };
- bool updated; // Has descriptor been updated?
+ bool updated; // Has descriptor been updated?
DescriptorClass descriptor_class;
};
// Shared helper functions - These are useful because the shared sampler image descriptor type
@@ -187,7 +187,7 @@
UNIQUE_VALIDATION_ERROR_CODE *, std::string *);
class SamplerDescriptor : public Descriptor {
- public:
+ public:
SamplerDescriptor();
SamplerDescriptor(const VkSampler *);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
@@ -196,14 +196,14 @@
virtual bool IsImmutableSampler() const override { return immutable_; };
VkSampler GetSampler() const { return sampler_; }
- private:
+ private:
// bool ValidateSampler(const VkSampler) const;
VkSampler sampler_;
bool immutable_;
};
class ImageSamplerDescriptor : public Descriptor {
- public:
+ public:
ImageSamplerDescriptor();
ImageSamplerDescriptor(const VkSampler *);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
@@ -214,7 +214,7 @@
VkImageView GetImageView() const { return image_view_; }
VkImageLayout GetImageLayout() const { return image_layout_; }
- private:
+ private:
VkSampler sampler_;
bool immutable_;
VkImageView image_view_;
@@ -222,7 +222,7 @@
};
class ImageDescriptor : public Descriptor {
- public:
+ public:
ImageDescriptor(const VkDescriptorType);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
@@ -231,14 +231,14 @@
VkImageView GetImageView() const { return image_view_; }
VkImageLayout GetImageLayout() const { return image_layout_; }
- private:
+ private:
bool storage_;
VkImageView image_view_;
VkImageLayout image_layout_;
};
class TexelDescriptor : public Descriptor {
- public:
+ public:
TexelDescriptor(const VkDescriptorType);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
@@ -246,13 +246,13 @@
virtual bool IsStorage() const override { return storage_; }
VkBufferView GetBufferView() const { return buffer_view_; }
- private:
+ private:
VkBufferView buffer_view_;
bool storage_;
};
class BufferDescriptor : public Descriptor {
- public:
+ public:
BufferDescriptor(const VkDescriptorType);
void WriteUpdate(const VkWriteDescriptorSet *, const uint32_t) override;
void CopyUpdate(const Descriptor *) override;
@@ -263,7 +263,7 @@
VkDeviceSize GetOffset() const { return offset_; }
VkDeviceSize GetRange() const { return range_; }
- private:
+ private:
bool storage_;
bool dynamic_;
VkBuffer buffer_;
@@ -311,7 +311,7 @@
* be correct at the time of update.
*/
class DescriptorSet : public BASE_NODE {
- public:
+ public:
DescriptorSet(const VkDescriptorSet, const VkDescriptorPool, const DescriptorSetLayout *, const core_validation::layer_data *);
~DescriptorSet();
// A number of common Get* functions that return data based on layout from which this set was created
@@ -381,7 +381,7 @@
// Return true if any part of set has ever been updated
bool IsUpdated() const { return some_update_; };
- private:
+ private:
bool VerifyWriteUpdateContents(const VkWriteDescriptorSet *, const uint32_t, UNIQUE_VALIDATION_ERROR_CODE *,
std::string *) const;
bool VerifyCopyUpdateContents(const VkCopyDescriptorSet *, const DescriptorSet *, VkDescriptorType, uint32_t,
@@ -391,7 +391,7 @@
std::string *) const;
// Private helper to set all bound cmd buffers to INVALID state
void InvalidateBoundCmdBuffers();
- bool some_update_; // has any part of the set ever been updated?
+ bool some_update_; // has any part of the set ever been updated?
VkDescriptorSet set_;
DESCRIPTOR_POOL_STATE *pool_state_;
const DescriptorSetLayout *p_layout_;
@@ -400,4 +400,4 @@
const core_validation::layer_data *device_data_;
};
}
-#endif // CORE_VALIDATION_DESCRIPTOR_SETS_H_
+#endif // CORE_VALIDATION_DESCRIPTOR_SETS_H_
diff --git a/layers/image.cpp b/layers/image.cpp
index 08c9b0a..c82c0ac 100644
--- a/layers/image.cpp
+++ b/layers/image.cpp
@@ -68,7 +68,10 @@
unordered_map<VkImage, IMAGE_STATE> imageMap;
layer_data()
- : report_data(nullptr), device_dispatch_table(nullptr), instance_dispatch_table(nullptr), physicalDevice(0),
+ : report_data(nullptr),
+ device_dispatch_table(nullptr),
+ instance_dispatch_table(nullptr),
+ physicalDevice(0),
physicalDeviceProperties(){};
};
@@ -131,8 +134,7 @@
chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
- if (result != VK_SUCCESS)
- return result;
+ if (result != VK_SUCCESS) return result;
layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
my_data->instance = *pInstance;
@@ -307,23 +309,23 @@
// Make sure all required dimension are non-zero at least.
bool failedMinSize = false;
switch (pCreateInfo->imageType) {
- case VK_IMAGE_TYPE_3D:
- if (pCreateInfo->extent.depth == 0) {
- failedMinSize = true;
- }
- // Intentional fall-through
- case VK_IMAGE_TYPE_2D:
- if (pCreateInfo->extent.height == 0) {
- failedMinSize = true;
- }
- // Intentional fall-through
- case VK_IMAGE_TYPE_1D:
- if (pCreateInfo->extent.width == 0) {
- failedMinSize = true;
- }
- break;
- default:
- break;
+ case VK_IMAGE_TYPE_3D:
+ if (pCreateInfo->extent.depth == 0) {
+ failedMinSize = true;
+ }
+ // Intentional fall-through
+ case VK_IMAGE_TYPE_2D:
+ if (pCreateInfo->extent.height == 0) {
+ failedMinSize = true;
+ }
+ // Intentional fall-through
+ case VK_IMAGE_TYPE_1D:
+ if (pCreateInfo->extent.width == 0) {
+ failedMinSize = true;
+ }
+ break;
+ default:
+ break;
}
// TODO: VALIDATION_ERROR_00716
// this is *almost* VU 00716, except should not be condidtional on image type - all extents must be non-zero for all types
@@ -490,8 +492,9 @@
for (uint32_t i = 0; i < rangeCount; i++) {
if (((pRanges[i].aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) &&
((pRanges[i].aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) {
- char const str[] = "vkCmdClearDepthStencilImage aspectMasks for all subresource ranges must be "
- "set to VK_IMAGE_ASPECT_DEPTH_BIT and/or VK_IMAGE_ASPECT_STENCIL_BIT";
+ char const str[] =
+ "vkCmdClearDepthStencilImage aspectMasks for all subresource ranges must be "
+ "set to VK_IMAGE_ASPECT_DEPTH_BIT and/or VK_IMAGE_ASPECT_STENCIL_BIT";
skipCall |=
log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
(uint64_t)commandBuffer, __LINE__, IMAGE_INVALID_IMAGE_ASPECT, "IMAGE", str);
@@ -532,18 +535,18 @@
dst->dstSubresource.layerCount))) {
result = true;
switch (type) {
- case VK_IMAGE_TYPE_3D:
- result &= RangesIntersect(src->srcOffset.z, src->extent.depth, dst->dstOffset.z, dst->extent.depth);
- // Intentionally fall through to 2D case
- case VK_IMAGE_TYPE_2D:
- result &= RangesIntersect(src->srcOffset.y, src->extent.height, dst->dstOffset.y, dst->extent.height);
- // Intentionally fall through to 1D case
- case VK_IMAGE_TYPE_1D:
- result &= RangesIntersect(src->srcOffset.x, src->extent.width, dst->dstOffset.x, dst->extent.width);
- break;
- default:
- // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
- assert(false);
+ case VK_IMAGE_TYPE_3D:
+ result &= RangesIntersect(src->srcOffset.z, src->extent.depth, dst->dstOffset.z, dst->extent.depth);
+ // Intentionally fall through to 2D case
+ case VK_IMAGE_TYPE_2D:
+ result &= RangesIntersect(src->srcOffset.y, src->extent.height, dst->dstOffset.y, dst->extent.height);
+ // Intentionally fall through to 1D case
+ case VK_IMAGE_TYPE_1D:
+ result &= RangesIntersect(src->srcOffset.x, src->extent.width, dst->dstOffset.x, dst->extent.width);
+ break;
+ default:
+ // Unrecognized or new IMAGE_TYPE enums will be caught in parameter_validation
+ assert(false);
}
}
return result;
@@ -554,26 +557,26 @@
bool result = false;
// Extents/depths cannot be negative but checks left in for clarity
switch (image->imageType) {
- case VK_IMAGE_TYPE_3D: // Validate z and depth
- if ((offset->z + extent->depth > image->extent.depth) || (offset->z < 0) ||
- ((offset->z + static_cast<int32_t>(extent->depth)) < 0)) {
- result = true;
- }
- // Intentionally fall through to 2D case to check height
- case VK_IMAGE_TYPE_2D: // Validate y and height
- if ((offset->y + extent->height > image->extent.height) || (offset->y < 0) ||
- ((offset->y + static_cast<int32_t>(extent->height)) < 0)) {
- result = true;
- }
- // Intentionally fall through to 1D case to check width
- case VK_IMAGE_TYPE_1D: // Validate x and width
- if ((offset->x + extent->width > image->extent.width) || (offset->x < 0) ||
- ((offset->x + static_cast<int32_t>(extent->width)) < 0)) {
- result = true;
- }
- break;
- default:
- assert(false);
+ case VK_IMAGE_TYPE_3D: // Validate z and depth
+ if ((offset->z + extent->depth > image->extent.depth) || (offset->z < 0) ||
+ ((offset->z + static_cast<int32_t>(extent->depth)) < 0)) {
+ result = true;
+ }
+ // Intentionally fall through to 2D case to check height
+ case VK_IMAGE_TYPE_2D: // Validate y and height
+ if ((offset->y + extent->height > image->extent.height) || (offset->y < 0) ||
+ ((offset->y + static_cast<int32_t>(extent->height)) < 0)) {
+ result = true;
+ }
+ // Intentionally fall through to 1D case to check width
+ case VK_IMAGE_TYPE_1D: // Validate x and width
+ if ((offset->x + extent->width > image->extent.width) || (offset->x < 0) ||
+ ((offset->x + static_cast<int32_t>(extent->width)) < 0)) {
+ result = true;
+ }
+ break;
+ default:
+ assert(false);
}
return result;
}
@@ -588,9 +591,7 @@
// TODO: This does not cover swapchain-created images. This should fall out when this layer is moved
// into the core_validation layer
if (src_image_entry && dst_image_entry) {
-
for (uint32_t i = 0; i < region_count; i++) {
-
if (regions[i].srcSubresource.layerCount == 0) {
std::stringstream ss;
ss << "vkCmdCopyImage: number of layers in pRegions[" << i << "] srcSubresource is zero";
@@ -770,7 +771,6 @@
VKAPI_ATTR void VKAPI_CALL CmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout,
VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
const VkImageCopy *pRegions) {
-
bool skip = false;
layer_data *device_data = get_my_data_ptr(get_dispatch_key(commandBuffer), layer_data_map);
@@ -810,8 +810,9 @@
// Having eliminated all other possibilities, image aspect must be depth or stencil or both
if (((aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) != VK_IMAGE_ASPECT_DEPTH_BIT) &&
((aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) != VK_IMAGE_ASPECT_STENCIL_BIT)) {
- char const str[] = "vkCmdClearAttachments aspectMask [%d] must be set to VK_IMAGE_ASPECT_DEPTH_BIT and/or "
- "VK_IMAGE_ASPECT_STENCIL_BIT. %s";
+ char const str[] =
+ "vkCmdClearAttachments aspectMask [%d] must be set to VK_IMAGE_ASPECT_DEPTH_BIT and/or "
+ "VK_IMAGE_ASPECT_STENCIL_BIT. %s";
skipCall |= log_msg(device_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT,
VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, (uint64_t)commandBuffer, __LINE__,
VALIDATION_ERROR_01127, "IMAGE", str, i, validation_error_map[VALIDATION_ERROR_01127]);
@@ -829,10 +830,8 @@
bool skip = false;
for (uint32_t i = 0; i < regionCount; i++) {
-
auto image_info = getImageState(dev_data, image);
if (image_info) {
-
if ((image_info->imageType == VK_IMAGE_TYPE_1D) || (image_info->imageType == VK_IMAGE_TYPE_2D)) {
if ((pRegions[i].imageOffset.z != 0) || (pRegions[i].imageExtent.depth != 1)) {
skip |= log_msg(dev_data->report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
@@ -960,7 +959,6 @@
auto dstImageEntry = getImageState(device_data, dstImage);
if (srcImageEntry && dstImageEntry) {
-
VkFormat srcFormat = srcImageEntry->format;
VkFormat dstFormat = dstImageEntry->format;
@@ -1262,8 +1260,7 @@
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
uint32_t *pCount, VkExtensionProperties *pProperties) {
// Image does not have any physical device extensions
- if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
- return util_GetExtensionProperties(0, NULL, pCount, pProperties);
+ if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(0, NULL, pCount, pProperties);
assert(physicalDevice);
@@ -1278,8 +1275,7 @@
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
PFN_vkVoidFunction proc = intercept_core_device_command(funcName);
- if (proc)
- return proc;
+ if (proc) return proc;
assert(device);
@@ -1287,29 +1283,24 @@
VkLayerDispatchTable *pTable = my_data->device_dispatch_table;
{
- if (pTable->GetDeviceProcAddr == NULL)
- return NULL;
+ if (pTable->GetDeviceProcAddr == NULL) return NULL;
return pTable->GetDeviceProcAddr(device, funcName);
}
}
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
PFN_vkVoidFunction proc = intercept_core_instance_command(funcName);
- if (!proc)
- proc = intercept_core_device_command(funcName);
- if (proc)
- return proc;
+ if (!proc) proc = intercept_core_device_command(funcName);
+ if (proc) return proc;
assert(instance);
layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
proc = debug_report_get_instance_proc_addr(my_data->report_data, funcName);
- if (proc)
- return proc;
+ if (proc) return proc;
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
- if (pTable->GetInstanceProcAddr == NULL)
- return NULL;
+ if (pTable->GetInstanceProcAddr == NULL) return NULL;
return pTable->GetInstanceProcAddr(instance, funcName);
}
@@ -1319,8 +1310,7 @@
layer_data *my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
- if (pTable->GetPhysicalDeviceProcAddr == NULL)
- return NULL;
+ if (pTable->GetPhysicalDeviceProcAddr == NULL) return NULL;
return pTable->GetPhysicalDeviceProcAddr(instance, funcName);
}
@@ -1342,8 +1332,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_instance_commands); i++) {
- if (!strcmp(core_instance_commands[i].name, name))
- return core_instance_commands[i].proc;
+ if (!strcmp(core_instance_commands[i].name, name)) return core_instance_commands[i].proc;
}
return nullptr;
@@ -1372,14 +1361,13 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_device_commands); i++) {
- if (!strcmp(core_device_commands[i].name, name))
- return core_device_commands[i].proc;
+ if (!strcmp(core_device_commands[i].name, name)) return core_device_commands[i].proc;
}
return nullptr;
}
-} // namespace image
+} // namespace image
// vk_layer_logging.h expects these to be defined
diff --git a/layers/image.h b/layers/image.h
index 1c10d50..727980a 100644
--- a/layers/image.h
+++ b/layers/image.h
@@ -27,24 +27,24 @@
// Image ERROR codes
enum IMAGE_ERROR {
- IMAGE_NONE, // Used for INFO & other non-error messages
- IMAGE_FORMAT_UNSUPPORTED, // Request to create Image or RenderPass with a format that is not supported
- IMAGE_RENDERPASS_INVALID_ATTACHMENT, // Invalid image layouts and/or load/storeOps for an attachment when creating RenderPass
- IMAGE_RENDERPASS_INVALID_DS_ATTACHMENT, // If no depth/stencil attachment for a RenderPass, verify that subpass DS attachment
- // is set to UNUSED
- IMAGE_INVALID_IMAGE_ASPECT, // Image aspect mask bits are invalid for this API call
- IMAGE_MISMATCHED_IMAGE_ASPECT, // Image aspect masks for source and dest images do not match
- IMAGE_VIEW_CREATE_ERROR, // Error occurred trying to create Image View
- IMAGE_MISMATCHED_IMAGE_TYPE, // Image types for source and dest images do not match
- IMAGE_MISMATCHED_IMAGE_FORMAT, // Image formats for source and dest images do not match
- IMAGE_INVALID_RESOLVE_SAMPLES, // Image resolve source samples less than two or dest samples greater than one
- IMAGE_INVALID_FORMAT, // Operation specifies an invalid format, or there is a format mismatch
- IMAGE_INVALID_FILTER, // Operation specifies an invalid filter setting
- IMAGE_INVALID_IMAGE_RESOURCE, // Image resource/subresource called with invalid setting
- IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, // Device limits for this format have been exceeded
- IMAGE_INVALID_LAYOUT, // Operation specifies an invalid layout
- IMAGE_INVALID_EXTENTS, // Operation specifies invalid image extents
- IMAGE_INVALID_USAGE, // Image was created without necessary usage for operation
+ IMAGE_NONE, // Used for INFO & other non-error messages
+ IMAGE_FORMAT_UNSUPPORTED, // Request to create Image or RenderPass with a format that is not supported
+ IMAGE_RENDERPASS_INVALID_ATTACHMENT, // Invalid image layouts and/or load/storeOps for an attachment when creating RenderPass
+ IMAGE_RENDERPASS_INVALID_DS_ATTACHMENT, // If no depth/stencil attachment for a RenderPass, verify that subpass DS attachment
+ // is set to UNUSED
+ IMAGE_INVALID_IMAGE_ASPECT, // Image aspect mask bits are invalid for this API call
+ IMAGE_MISMATCHED_IMAGE_ASPECT, // Image aspect masks for source and dest images do not match
+ IMAGE_VIEW_CREATE_ERROR, // Error occurred trying to create Image View
+ IMAGE_MISMATCHED_IMAGE_TYPE, // Image types for source and dest images do not match
+ IMAGE_MISMATCHED_IMAGE_FORMAT, // Image formats for source and dest images do not match
+ IMAGE_INVALID_RESOLVE_SAMPLES, // Image resolve source samples less than two or dest samples greater than one
+ IMAGE_INVALID_FORMAT, // Operation specifies an invalid format, or there is a format mismatch
+ IMAGE_INVALID_FILTER, // Operation specifies an invalid filter setting
+ IMAGE_INVALID_IMAGE_RESOURCE, // Image resource/subresource called with invalid setting
+ IMAGE_INVALID_FORMAT_LIMITS_VIOLATION, // Device limits for this format have been exceeded
+ IMAGE_INVALID_LAYOUT, // Operation specifies an invalid layout
+ IMAGE_INVALID_EXTENTS, // Operation specifies invalid image extents
+ IMAGE_INVALID_USAGE, // Image was created without necessary usage for operation
};
struct IMAGE_STATE {
@@ -57,12 +57,23 @@
VkImageCreateFlags flags;
VkImageUsageFlags usage;
IMAGE_STATE()
- : mipLevels(0), arraySize(0), format(VK_FORMAT_UNDEFINED), samples(VK_SAMPLE_COUNT_1_BIT),
- imageType(VK_IMAGE_TYPE_RANGE_SIZE), extent{}, flags(0), usage(0){};
+ : mipLevels(0),
+ arraySize(0),
+ format(VK_FORMAT_UNDEFINED),
+ samples(VK_SAMPLE_COUNT_1_BIT),
+ imageType(VK_IMAGE_TYPE_RANGE_SIZE),
+ extent{},
+ flags(0),
+ usage(0){};
IMAGE_STATE(const VkImageCreateInfo *pCreateInfo)
- : mipLevels(pCreateInfo->mipLevels), arraySize(pCreateInfo->arrayLayers), format(pCreateInfo->format),
- samples(pCreateInfo->samples), imageType(pCreateInfo->imageType), extent(pCreateInfo->extent), flags(pCreateInfo->flags),
+ : mipLevels(pCreateInfo->mipLevels),
+ arraySize(pCreateInfo->arrayLayers),
+ format(pCreateInfo->format),
+ samples(pCreateInfo->samples),
+ imageType(pCreateInfo->imageType),
+ extent(pCreateInfo->extent),
+ flags(pCreateInfo->flags),
usage(pCreateInfo->usage){};
};
-#endif // IMAGE_H
+#endif // IMAGE_H
diff --git a/layers/object_tracker.cpp b/layers/object_tracker.cpp
index e9a97bd..6b854be 100644
--- a/layers/object_tracker.cpp
+++ b/layers/object_tracker.cpp
@@ -49,7 +49,6 @@
static uint32_t loader_layer_if_version = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
static void InitObjectTracker(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
-
layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_object_tracker");
}
@@ -250,8 +249,14 @@
device_data->swapchainImageMap[reinterpret_cast<uint64_t &>(swapchain_image)] = pNewObjNode;
}
-template <typename T> uint64_t handle_value(T handle) { return reinterpret_cast<uint64_t &>(handle); }
-template <typename T> uint64_t handle_value(T *handle) { return reinterpret_cast<uint64_t>(handle); }
+template <typename T>
+uint64_t handle_value(T handle) {
+ return reinterpret_cast<uint64_t &>(handle);
+}
+template <typename T>
+uint64_t handle_value(T *handle) {
+ return reinterpret_cast<uint64_t>(handle);
+}
template <typename T1, typename T2>
static void CreateObject(T1 dispatchable_object, T2 object, VkDebugReportObjectTypeEXT object_type,
@@ -289,7 +294,6 @@
if (object_handle != VK_NULL_HANDLE) {
auto item = device_data->object_map[object_type].find(object_handle);
if (item != device_data->object_map[object_type].end()) {
-
OBJTRACK_NODE *pNode = item->second;
assert(device_data->num_total_objects > 0);
device_data->num_total_objects--;
@@ -451,7 +455,6 @@
}
VKAPI_ATTR void VKAPI_CALL DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
-
std::unique_lock<std::mutex> lock(global_lock);
ValidateObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, true, VALIDATION_ERROR_00052);
DestroyObject(device, device, VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, pAllocator, VALIDATION_ERROR_00050,
@@ -2797,7 +2800,7 @@
->GetPhysicalDeviceWin32PresentationSupportKHR(physicalDevice, queueFamilyIndex);
return result;
}
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
@@ -2837,7 +2840,7 @@
->GetPhysicalDeviceXcbPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection, visual_id);
return result;
}
-#endif // VK_USE_PLATFORM_XCB_KHR
+#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
@@ -2877,7 +2880,7 @@
->GetPhysicalDeviceXlibPresentationSupportKHR(physicalDevice, queueFamilyIndex, dpy, visualID);
return result;
}
-#endif // VK_USE_PLATFORM_XLIB_KHR
+#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
@@ -2916,7 +2919,7 @@
->GetPhysicalDeviceMirPresentationSupportKHR(physicalDevice, queueFamilyIndex, connection);
return result;
}
-#endif // VK_USE_PLATFORM_MIR_KHR
+#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
@@ -2956,7 +2959,7 @@
->GetPhysicalDeviceWaylandPresentationSupportKHR(physicalDevice, queueFamilyIndex, display);
return result;
}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
@@ -2979,7 +2982,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+#endif // VK_USE_PLATFORM_ANDROID_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
const VkSwapchainCreateInfoKHR *pCreateInfos,
@@ -3049,8 +3052,8 @@
static const VkExtensionProperties instance_extensions[] = {{VK_EXT_DEBUG_REPORT_EXTENSION_NAME, VK_EXT_DEBUG_REPORT_SPEC_VERSION}};
static const VkLayerProperties globalLayerProps = {"VK_LAYER_LUNARG_object_tracker",
- VK_LAYER_API_VERSION, // specVersion
- 1, // implementationVersion
+ VK_LAYER_API_VERSION, // specVersion
+ 1, // implementationVersion
"LunarG Validation Layer"};
VKAPI_ATTR VkResult VKAPI_CALL EnumerateInstanceLayerProperties(uint32_t *pCount, VkLayerProperties *pProperties) {
@@ -3090,11 +3093,9 @@
static inline PFN_vkVoidFunction InterceptWsiEnabledCommand(const char *name, VkInstance instance) {
VkLayerInstanceDispatchTable *pTable = get_dispatch_table(ot_instance_table_map, instance);
- if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled)
- return nullptr;
+ if (instanceExtMap.size() == 0 || !instanceExtMap[pTable].wsi_enabled) return nullptr;
- if (!strcmp("vkDestroySurfaceKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
+ if (!strcmp("vkDestroySurfaceKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR);
if (!strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR);
if (!strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", name))
@@ -3111,35 +3112,35 @@
return reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR);
if ((instanceExtMap[pTable].win32_enabled == true) && !strcmp("vkGetPhysicalDeviceWin32PresentationSupportKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR);
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkCreateXcbSurfaceKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR);
if ((instanceExtMap[pTable].xcb_enabled == true) && !strcmp("vkGetPhysicalDeviceXcbPresentationSupportKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR);
-#endif // VK_USE_PLATFORM_XCB_KHR
+#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkCreateXlibSurfaceKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR);
if ((instanceExtMap[pTable].xlib_enabled == true) && !strcmp("vkGetPhysicalDeviceXlibPresentationSupportKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR);
-#endif // VK_USE_PLATFORM_XLIB_KHR
+#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkCreateMirSurfaceKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR);
if ((instanceExtMap[pTable].mir_enabled == true) && !strcmp("vkGetPhysicalDeviceMirPresentationSupportKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR);
-#endif // VK_USE_PLATFORM_MIR_KHR
+#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkCreateWaylandSurfaceKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR);
if ((instanceExtMap[pTable].wayland_enabled == true) && !strcmp("vkGetPhysicalDeviceWaylandPresentationSupportKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR);
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
if ((instanceExtMap[pTable].android_enabled == true) && !strcmp("vkCreateAndroidSurfaceKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR);
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+#endif // VK_USE_PLATFORM_ANDROID_KHR
return nullptr;
}
@@ -3791,7 +3792,6 @@
VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage,
VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType,
VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties) {
-
bool skip_call = false;
{
std::lock_guard<std::mutex> lock(global_lock);
@@ -3951,7 +3951,7 @@
VkResult result = get_dispatch_table(ot_device_table_map, device)->GetMemoryWin32HandleNV(device, memory, handleType, pHandle);
return result;
}
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
// VK_AMD_draw_indirect_count Extension
VKAPI_ATTR void VKAPI_CALL CmdDrawIndirectCountAMD(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset,
@@ -4285,7 +4285,7 @@
return result;
}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
// VK_EXT_display_surface_counter Extension
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
@@ -4385,309 +4385,163 @@
}
static inline PFN_vkVoidFunction InterceptCoreDeviceCommand(const char *name) {
- if (!name || name[0] != 'v' || name[1] != 'k')
- return NULL;
+ if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
name += 2;
- if (!strcmp(name, "GetDeviceProcAddr"))
- return (PFN_vkVoidFunction)GetDeviceProcAddr;
- if (!strcmp(name, "DestroyDevice"))
- return (PFN_vkVoidFunction)DestroyDevice;
- if (!strcmp(name, "GetDeviceQueue"))
- return (PFN_vkVoidFunction)GetDeviceQueue;
- if (!strcmp(name, "QueueSubmit"))
- return (PFN_vkVoidFunction)QueueSubmit;
- if (!strcmp(name, "QueueWaitIdle"))
- return (PFN_vkVoidFunction)QueueWaitIdle;
- if (!strcmp(name, "DeviceWaitIdle"))
- return (PFN_vkVoidFunction)DeviceWaitIdle;
- if (!strcmp(name, "AllocateMemory"))
- return (PFN_vkVoidFunction)AllocateMemory;
- if (!strcmp(name, "FreeMemory"))
- return (PFN_vkVoidFunction)FreeMemory;
- if (!strcmp(name, "MapMemory"))
- return (PFN_vkVoidFunction)MapMemory;
- if (!strcmp(name, "UnmapMemory"))
- return (PFN_vkVoidFunction)UnmapMemory;
- if (!strcmp(name, "FlushMappedMemoryRanges"))
- return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
- if (!strcmp(name, "InvalidateMappedMemoryRanges"))
- return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
- if (!strcmp(name, "GetDeviceMemoryCommitment"))
- return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
- if (!strcmp(name, "BindBufferMemory"))
- return (PFN_vkVoidFunction)BindBufferMemory;
- if (!strcmp(name, "BindImageMemory"))
- return (PFN_vkVoidFunction)BindImageMemory;
- if (!strcmp(name, "GetBufferMemoryRequirements"))
- return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
- if (!strcmp(name, "GetImageMemoryRequirements"))
- return (PFN_vkVoidFunction)GetImageMemoryRequirements;
- if (!strcmp(name, "GetImageSparseMemoryRequirements"))
- return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
- if (!strcmp(name, "QueueBindSparse"))
- return (PFN_vkVoidFunction)QueueBindSparse;
- if (!strcmp(name, "CreateFence"))
- return (PFN_vkVoidFunction)CreateFence;
- if (!strcmp(name, "DestroyFence"))
- return (PFN_vkVoidFunction)DestroyFence;
- if (!strcmp(name, "ResetFences"))
- return (PFN_vkVoidFunction)ResetFences;
- if (!strcmp(name, "GetFenceStatus"))
- return (PFN_vkVoidFunction)GetFenceStatus;
- if (!strcmp(name, "WaitForFences"))
- return (PFN_vkVoidFunction)WaitForFences;
- if (!strcmp(name, "CreateSemaphore"))
- return (PFN_vkVoidFunction)CreateSemaphore;
- if (!strcmp(name, "DestroySemaphore"))
- return (PFN_vkVoidFunction)DestroySemaphore;
- if (!strcmp(name, "CreateEvent"))
- return (PFN_vkVoidFunction)CreateEvent;
- if (!strcmp(name, "DestroyEvent"))
- return (PFN_vkVoidFunction)DestroyEvent;
- if (!strcmp(name, "GetEventStatus"))
- return (PFN_vkVoidFunction)GetEventStatus;
- if (!strcmp(name, "SetEvent"))
- return (PFN_vkVoidFunction)SetEvent;
- if (!strcmp(name, "ResetEvent"))
- return (PFN_vkVoidFunction)ResetEvent;
- if (!strcmp(name, "CreateQueryPool"))
- return (PFN_vkVoidFunction)CreateQueryPool;
- if (!strcmp(name, "DestroyQueryPool"))
- return (PFN_vkVoidFunction)DestroyQueryPool;
- if (!strcmp(name, "GetQueryPoolResults"))
- return (PFN_vkVoidFunction)GetQueryPoolResults;
- if (!strcmp(name, "CreateBuffer"))
- return (PFN_vkVoidFunction)CreateBuffer;
- if (!strcmp(name, "DestroyBuffer"))
- return (PFN_vkVoidFunction)DestroyBuffer;
- if (!strcmp(name, "CreateBufferView"))
- return (PFN_vkVoidFunction)CreateBufferView;
- if (!strcmp(name, "DestroyBufferView"))
- return (PFN_vkVoidFunction)DestroyBufferView;
- if (!strcmp(name, "CreateImage"))
- return (PFN_vkVoidFunction)CreateImage;
- if (!strcmp(name, "DestroyImage"))
- return (PFN_vkVoidFunction)DestroyImage;
- if (!strcmp(name, "GetImageSubresourceLayout"))
- return (PFN_vkVoidFunction)GetImageSubresourceLayout;
- if (!strcmp(name, "CreateImageView"))
- return (PFN_vkVoidFunction)CreateImageView;
- if (!strcmp(name, "DestroyImageView"))
- return (PFN_vkVoidFunction)DestroyImageView;
- if (!strcmp(name, "CreateShaderModule"))
- return (PFN_vkVoidFunction)CreateShaderModule;
- if (!strcmp(name, "DestroyShaderModule"))
- return (PFN_vkVoidFunction)DestroyShaderModule;
- if (!strcmp(name, "CreatePipelineCache"))
- return (PFN_vkVoidFunction)CreatePipelineCache;
- if (!strcmp(name, "DestroyPipelineCache"))
- return (PFN_vkVoidFunction)DestroyPipelineCache;
- if (!strcmp(name, "GetPipelineCacheData"))
- return (PFN_vkVoidFunction)GetPipelineCacheData;
- if (!strcmp(name, "MergePipelineCaches"))
- return (PFN_vkVoidFunction)MergePipelineCaches;
- if (!strcmp(name, "CreateGraphicsPipelines"))
- return (PFN_vkVoidFunction)CreateGraphicsPipelines;
- if (!strcmp(name, "CreateComputePipelines"))
- return (PFN_vkVoidFunction)CreateComputePipelines;
- if (!strcmp(name, "DestroyPipeline"))
- return (PFN_vkVoidFunction)DestroyPipeline;
- if (!strcmp(name, "CreatePipelineLayout"))
- return (PFN_vkVoidFunction)CreatePipelineLayout;
- if (!strcmp(name, "DestroyPipelineLayout"))
- return (PFN_vkVoidFunction)DestroyPipelineLayout;
- if (!strcmp(name, "CreateSampler"))
- return (PFN_vkVoidFunction)CreateSampler;
- if (!strcmp(name, "DestroySampler"))
- return (PFN_vkVoidFunction)DestroySampler;
- if (!strcmp(name, "CreateDescriptorSetLayout"))
- return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
- if (!strcmp(name, "DestroyDescriptorSetLayout"))
- return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
- if (!strcmp(name, "CreateDescriptorPool"))
- return (PFN_vkVoidFunction)CreateDescriptorPool;
- if (!strcmp(name, "DestroyDescriptorPool"))
- return (PFN_vkVoidFunction)DestroyDescriptorPool;
- if (!strcmp(name, "ResetDescriptorPool"))
- return (PFN_vkVoidFunction)ResetDescriptorPool;
- if (!strcmp(name, "AllocateDescriptorSets"))
- return (PFN_vkVoidFunction)AllocateDescriptorSets;
- if (!strcmp(name, "FreeDescriptorSets"))
- return (PFN_vkVoidFunction)FreeDescriptorSets;
- if (!strcmp(name, "UpdateDescriptorSets"))
- return (PFN_vkVoidFunction)UpdateDescriptorSets;
- if (!strcmp(name, "CreateFramebuffer"))
- return (PFN_vkVoidFunction)CreateFramebuffer;
- if (!strcmp(name, "DestroyFramebuffer"))
- return (PFN_vkVoidFunction)DestroyFramebuffer;
- if (!strcmp(name, "CreateRenderPass"))
- return (PFN_vkVoidFunction)CreateRenderPass;
- if (!strcmp(name, "DestroyRenderPass"))
- return (PFN_vkVoidFunction)DestroyRenderPass;
- if (!strcmp(name, "GetRenderAreaGranularity"))
- return (PFN_vkVoidFunction)GetRenderAreaGranularity;
- if (!strcmp(name, "CreateCommandPool"))
- return (PFN_vkVoidFunction)CreateCommandPool;
- if (!strcmp(name, "DestroyCommandPool"))
- return (PFN_vkVoidFunction)DestroyCommandPool;
- if (!strcmp(name, "ResetCommandPool"))
- return (PFN_vkVoidFunction)ResetCommandPool;
- if (!strcmp(name, "AllocateCommandBuffers"))
- return (PFN_vkVoidFunction)AllocateCommandBuffers;
- if (!strcmp(name, "FreeCommandBuffers"))
- return (PFN_vkVoidFunction)FreeCommandBuffers;
- if (!strcmp(name, "BeginCommandBuffer"))
- return (PFN_vkVoidFunction)BeginCommandBuffer;
- if (!strcmp(name, "EndCommandBuffer"))
- return (PFN_vkVoidFunction)EndCommandBuffer;
- if (!strcmp(name, "ResetCommandBuffer"))
- return (PFN_vkVoidFunction)ResetCommandBuffer;
- if (!strcmp(name, "CmdBindPipeline"))
- return (PFN_vkVoidFunction)CmdBindPipeline;
- if (!strcmp(name, "CmdSetViewport"))
- return (PFN_vkVoidFunction)CmdSetViewport;
- if (!strcmp(name, "CmdSetScissor"))
- return (PFN_vkVoidFunction)CmdSetScissor;
- if (!strcmp(name, "CmdSetLineWidth"))
- return (PFN_vkVoidFunction)CmdSetLineWidth;
- if (!strcmp(name, "CmdSetDepthBias"))
- return (PFN_vkVoidFunction)CmdSetDepthBias;
- if (!strcmp(name, "CmdSetBlendConstants"))
- return (PFN_vkVoidFunction)CmdSetBlendConstants;
- if (!strcmp(name, "CmdSetDepthBounds"))
- return (PFN_vkVoidFunction)CmdSetDepthBounds;
- if (!strcmp(name, "CmdSetStencilCompareMask"))
- return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
- if (!strcmp(name, "CmdSetStencilWriteMask"))
- return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
- if (!strcmp(name, "CmdSetStencilReference"))
- return (PFN_vkVoidFunction)CmdSetStencilReference;
- if (!strcmp(name, "CmdBindDescriptorSets"))
- return (PFN_vkVoidFunction)CmdBindDescriptorSets;
- if (!strcmp(name, "CmdBindIndexBuffer"))
- return (PFN_vkVoidFunction)CmdBindIndexBuffer;
- if (!strcmp(name, "CmdBindVertexBuffers"))
- return (PFN_vkVoidFunction)CmdBindVertexBuffers;
- if (!strcmp(name, "CmdDraw"))
- return (PFN_vkVoidFunction)CmdDraw;
- if (!strcmp(name, "CmdDrawIndexed"))
- return (PFN_vkVoidFunction)CmdDrawIndexed;
- if (!strcmp(name, "CmdDrawIndirect"))
- return (PFN_vkVoidFunction)CmdDrawIndirect;
- if (!strcmp(name, "CmdDrawIndexedIndirect"))
- return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
- if (!strcmp(name, "CmdDispatch"))
- return (PFN_vkVoidFunction)CmdDispatch;
- if (!strcmp(name, "CmdDispatchIndirect"))
- return (PFN_vkVoidFunction)CmdDispatchIndirect;
- if (!strcmp(name, "CmdCopyBuffer"))
- return (PFN_vkVoidFunction)CmdCopyBuffer;
- if (!strcmp(name, "CmdCopyImage"))
- return (PFN_vkVoidFunction)CmdCopyImage;
- if (!strcmp(name, "CmdBlitImage"))
- return (PFN_vkVoidFunction)CmdBlitImage;
- if (!strcmp(name, "CmdCopyBufferToImage"))
- return (PFN_vkVoidFunction)CmdCopyBufferToImage;
- if (!strcmp(name, "CmdCopyImageToBuffer"))
- return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
- if (!strcmp(name, "CmdUpdateBuffer"))
- return (PFN_vkVoidFunction)CmdUpdateBuffer;
- if (!strcmp(name, "CmdFillBuffer"))
- return (PFN_vkVoidFunction)CmdFillBuffer;
- if (!strcmp(name, "CmdClearColorImage"))
- return (PFN_vkVoidFunction)CmdClearColorImage;
- if (!strcmp(name, "CmdClearDepthStencilImage"))
- return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
- if (!strcmp(name, "CmdClearAttachments"))
- return (PFN_vkVoidFunction)CmdClearAttachments;
- if (!strcmp(name, "CmdResolveImage"))
- return (PFN_vkVoidFunction)CmdResolveImage;
- if (!strcmp(name, "CmdSetEvent"))
- return (PFN_vkVoidFunction)CmdSetEvent;
- if (!strcmp(name, "CmdResetEvent"))
- return (PFN_vkVoidFunction)CmdResetEvent;
- if (!strcmp(name, "CmdWaitEvents"))
- return (PFN_vkVoidFunction)CmdWaitEvents;
- if (!strcmp(name, "CmdPipelineBarrier"))
- return (PFN_vkVoidFunction)CmdPipelineBarrier;
- if (!strcmp(name, "CmdBeginQuery"))
- return (PFN_vkVoidFunction)CmdBeginQuery;
- if (!strcmp(name, "CmdEndQuery"))
- return (PFN_vkVoidFunction)CmdEndQuery;
- if (!strcmp(name, "CmdResetQueryPool"))
- return (PFN_vkVoidFunction)CmdResetQueryPool;
- if (!strcmp(name, "CmdWriteTimestamp"))
- return (PFN_vkVoidFunction)CmdWriteTimestamp;
- if (!strcmp(name, "CmdCopyQueryPoolResults"))
- return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
- if (!strcmp(name, "CmdPushConstants"))
- return (PFN_vkVoidFunction)CmdPushConstants;
- if (!strcmp(name, "CmdBeginRenderPass"))
- return (PFN_vkVoidFunction)CmdBeginRenderPass;
- if (!strcmp(name, "CmdNextSubpass"))
- return (PFN_vkVoidFunction)CmdNextSubpass;
- if (!strcmp(name, "CmdEndRenderPass"))
- return (PFN_vkVoidFunction)CmdEndRenderPass;
- if (!strcmp(name, "CmdExecuteCommands"))
- return (PFN_vkVoidFunction)CmdExecuteCommands;
- if (!strcmp(name, "DebugMarkerSetObjectTagEXT"))
- return (PFN_vkVoidFunction)DebugMarkerSetObjectTagEXT;
- if (!strcmp(name, "DebugMarkerSetObjectNameEXT"))
- return (PFN_vkVoidFunction)DebugMarkerSetObjectNameEXT;
- if (!strcmp(name, "CmdDebugMarkerBeginEXT"))
- return (PFN_vkVoidFunction)CmdDebugMarkerBeginEXT;
- if (!strcmp(name, "CmdDebugMarkerEndEXT"))
- return (PFN_vkVoidFunction)CmdDebugMarkerEndEXT;
- if (!strcmp(name, "CmdDebugMarkerInsertEXT"))
- return (PFN_vkVoidFunction)CmdDebugMarkerInsertEXT;
+ if (!strcmp(name, "GetDeviceProcAddr")) return (PFN_vkVoidFunction)GetDeviceProcAddr;
+ if (!strcmp(name, "DestroyDevice")) return (PFN_vkVoidFunction)DestroyDevice;
+ if (!strcmp(name, "GetDeviceQueue")) return (PFN_vkVoidFunction)GetDeviceQueue;
+ if (!strcmp(name, "QueueSubmit")) return (PFN_vkVoidFunction)QueueSubmit;
+ if (!strcmp(name, "QueueWaitIdle")) return (PFN_vkVoidFunction)QueueWaitIdle;
+ if (!strcmp(name, "DeviceWaitIdle")) return (PFN_vkVoidFunction)DeviceWaitIdle;
+ if (!strcmp(name, "AllocateMemory")) return (PFN_vkVoidFunction)AllocateMemory;
+ if (!strcmp(name, "FreeMemory")) return (PFN_vkVoidFunction)FreeMemory;
+ if (!strcmp(name, "MapMemory")) return (PFN_vkVoidFunction)MapMemory;
+ if (!strcmp(name, "UnmapMemory")) return (PFN_vkVoidFunction)UnmapMemory;
+ if (!strcmp(name, "FlushMappedMemoryRanges")) return (PFN_vkVoidFunction)FlushMappedMemoryRanges;
+ if (!strcmp(name, "InvalidateMappedMemoryRanges")) return (PFN_vkVoidFunction)InvalidateMappedMemoryRanges;
+ if (!strcmp(name, "GetDeviceMemoryCommitment")) return (PFN_vkVoidFunction)GetDeviceMemoryCommitment;
+ if (!strcmp(name, "BindBufferMemory")) return (PFN_vkVoidFunction)BindBufferMemory;
+ if (!strcmp(name, "BindImageMemory")) return (PFN_vkVoidFunction)BindImageMemory;
+ if (!strcmp(name, "GetBufferMemoryRequirements")) return (PFN_vkVoidFunction)GetBufferMemoryRequirements;
+ if (!strcmp(name, "GetImageMemoryRequirements")) return (PFN_vkVoidFunction)GetImageMemoryRequirements;
+ if (!strcmp(name, "GetImageSparseMemoryRequirements")) return (PFN_vkVoidFunction)GetImageSparseMemoryRequirements;
+ if (!strcmp(name, "QueueBindSparse")) return (PFN_vkVoidFunction)QueueBindSparse;
+ if (!strcmp(name, "CreateFence")) return (PFN_vkVoidFunction)CreateFence;
+ if (!strcmp(name, "DestroyFence")) return (PFN_vkVoidFunction)DestroyFence;
+ if (!strcmp(name, "ResetFences")) return (PFN_vkVoidFunction)ResetFences;
+ if (!strcmp(name, "GetFenceStatus")) return (PFN_vkVoidFunction)GetFenceStatus;
+ if (!strcmp(name, "WaitForFences")) return (PFN_vkVoidFunction)WaitForFences;
+ if (!strcmp(name, "CreateSemaphore")) return (PFN_vkVoidFunction)CreateSemaphore;
+ if (!strcmp(name, "DestroySemaphore")) return (PFN_vkVoidFunction)DestroySemaphore;
+ if (!strcmp(name, "CreateEvent")) return (PFN_vkVoidFunction)CreateEvent;
+ if (!strcmp(name, "DestroyEvent")) return (PFN_vkVoidFunction)DestroyEvent;
+ if (!strcmp(name, "GetEventStatus")) return (PFN_vkVoidFunction)GetEventStatus;
+ if (!strcmp(name, "SetEvent")) return (PFN_vkVoidFunction)SetEvent;
+ if (!strcmp(name, "ResetEvent")) return (PFN_vkVoidFunction)ResetEvent;
+ if (!strcmp(name, "CreateQueryPool")) return (PFN_vkVoidFunction)CreateQueryPool;
+ if (!strcmp(name, "DestroyQueryPool")) return (PFN_vkVoidFunction)DestroyQueryPool;
+ if (!strcmp(name, "GetQueryPoolResults")) return (PFN_vkVoidFunction)GetQueryPoolResults;
+ if (!strcmp(name, "CreateBuffer")) return (PFN_vkVoidFunction)CreateBuffer;
+ if (!strcmp(name, "DestroyBuffer")) return (PFN_vkVoidFunction)DestroyBuffer;
+ if (!strcmp(name, "CreateBufferView")) return (PFN_vkVoidFunction)CreateBufferView;
+ if (!strcmp(name, "DestroyBufferView")) return (PFN_vkVoidFunction)DestroyBufferView;
+ if (!strcmp(name, "CreateImage")) return (PFN_vkVoidFunction)CreateImage;
+ if (!strcmp(name, "DestroyImage")) return (PFN_vkVoidFunction)DestroyImage;
+ if (!strcmp(name, "GetImageSubresourceLayout")) return (PFN_vkVoidFunction)GetImageSubresourceLayout;
+ if (!strcmp(name, "CreateImageView")) return (PFN_vkVoidFunction)CreateImageView;
+ if (!strcmp(name, "DestroyImageView")) return (PFN_vkVoidFunction)DestroyImageView;
+ if (!strcmp(name, "CreateShaderModule")) return (PFN_vkVoidFunction)CreateShaderModule;
+ if (!strcmp(name, "DestroyShaderModule")) return (PFN_vkVoidFunction)DestroyShaderModule;
+ if (!strcmp(name, "CreatePipelineCache")) return (PFN_vkVoidFunction)CreatePipelineCache;
+ if (!strcmp(name, "DestroyPipelineCache")) return (PFN_vkVoidFunction)DestroyPipelineCache;
+ if (!strcmp(name, "GetPipelineCacheData")) return (PFN_vkVoidFunction)GetPipelineCacheData;
+ if (!strcmp(name, "MergePipelineCaches")) return (PFN_vkVoidFunction)MergePipelineCaches;
+ if (!strcmp(name, "CreateGraphicsPipelines")) return (PFN_vkVoidFunction)CreateGraphicsPipelines;
+ if (!strcmp(name, "CreateComputePipelines")) return (PFN_vkVoidFunction)CreateComputePipelines;
+ if (!strcmp(name, "DestroyPipeline")) return (PFN_vkVoidFunction)DestroyPipeline;
+ if (!strcmp(name, "CreatePipelineLayout")) return (PFN_vkVoidFunction)CreatePipelineLayout;
+ if (!strcmp(name, "DestroyPipelineLayout")) return (PFN_vkVoidFunction)DestroyPipelineLayout;
+ if (!strcmp(name, "CreateSampler")) return (PFN_vkVoidFunction)CreateSampler;
+ if (!strcmp(name, "DestroySampler")) return (PFN_vkVoidFunction)DestroySampler;
+ if (!strcmp(name, "CreateDescriptorSetLayout")) return (PFN_vkVoidFunction)CreateDescriptorSetLayout;
+ if (!strcmp(name, "DestroyDescriptorSetLayout")) return (PFN_vkVoidFunction)DestroyDescriptorSetLayout;
+ if (!strcmp(name, "CreateDescriptorPool")) return (PFN_vkVoidFunction)CreateDescriptorPool;
+ if (!strcmp(name, "DestroyDescriptorPool")) return (PFN_vkVoidFunction)DestroyDescriptorPool;
+ if (!strcmp(name, "ResetDescriptorPool")) return (PFN_vkVoidFunction)ResetDescriptorPool;
+ if (!strcmp(name, "AllocateDescriptorSets")) return (PFN_vkVoidFunction)AllocateDescriptorSets;
+ if (!strcmp(name, "FreeDescriptorSets")) return (PFN_vkVoidFunction)FreeDescriptorSets;
+ if (!strcmp(name, "UpdateDescriptorSets")) return (PFN_vkVoidFunction)UpdateDescriptorSets;
+ if (!strcmp(name, "CreateFramebuffer")) return (PFN_vkVoidFunction)CreateFramebuffer;
+ if (!strcmp(name, "DestroyFramebuffer")) return (PFN_vkVoidFunction)DestroyFramebuffer;
+ if (!strcmp(name, "CreateRenderPass")) return (PFN_vkVoidFunction)CreateRenderPass;
+ if (!strcmp(name, "DestroyRenderPass")) return (PFN_vkVoidFunction)DestroyRenderPass;
+ if (!strcmp(name, "GetRenderAreaGranularity")) return (PFN_vkVoidFunction)GetRenderAreaGranularity;
+ if (!strcmp(name, "CreateCommandPool")) return (PFN_vkVoidFunction)CreateCommandPool;
+ if (!strcmp(name, "DestroyCommandPool")) return (PFN_vkVoidFunction)DestroyCommandPool;
+ if (!strcmp(name, "ResetCommandPool")) return (PFN_vkVoidFunction)ResetCommandPool;
+ if (!strcmp(name, "AllocateCommandBuffers")) return (PFN_vkVoidFunction)AllocateCommandBuffers;
+ if (!strcmp(name, "FreeCommandBuffers")) return (PFN_vkVoidFunction)FreeCommandBuffers;
+ if (!strcmp(name, "BeginCommandBuffer")) return (PFN_vkVoidFunction)BeginCommandBuffer;
+ if (!strcmp(name, "EndCommandBuffer")) return (PFN_vkVoidFunction)EndCommandBuffer;
+ if (!strcmp(name, "ResetCommandBuffer")) return (PFN_vkVoidFunction)ResetCommandBuffer;
+ if (!strcmp(name, "CmdBindPipeline")) return (PFN_vkVoidFunction)CmdBindPipeline;
+ if (!strcmp(name, "CmdSetViewport")) return (PFN_vkVoidFunction)CmdSetViewport;
+ if (!strcmp(name, "CmdSetScissor")) return (PFN_vkVoidFunction)CmdSetScissor;
+ if (!strcmp(name, "CmdSetLineWidth")) return (PFN_vkVoidFunction)CmdSetLineWidth;
+ if (!strcmp(name, "CmdSetDepthBias")) return (PFN_vkVoidFunction)CmdSetDepthBias;
+ if (!strcmp(name, "CmdSetBlendConstants")) return (PFN_vkVoidFunction)CmdSetBlendConstants;
+ if (!strcmp(name, "CmdSetDepthBounds")) return (PFN_vkVoidFunction)CmdSetDepthBounds;
+ if (!strcmp(name, "CmdSetStencilCompareMask")) return (PFN_vkVoidFunction)CmdSetStencilCompareMask;
+ if (!strcmp(name, "CmdSetStencilWriteMask")) return (PFN_vkVoidFunction)CmdSetStencilWriteMask;
+ if (!strcmp(name, "CmdSetStencilReference")) return (PFN_vkVoidFunction)CmdSetStencilReference;
+ if (!strcmp(name, "CmdBindDescriptorSets")) return (PFN_vkVoidFunction)CmdBindDescriptorSets;
+ if (!strcmp(name, "CmdBindIndexBuffer")) return (PFN_vkVoidFunction)CmdBindIndexBuffer;
+ if (!strcmp(name, "CmdBindVertexBuffers")) return (PFN_vkVoidFunction)CmdBindVertexBuffers;
+ if (!strcmp(name, "CmdDraw")) return (PFN_vkVoidFunction)CmdDraw;
+ if (!strcmp(name, "CmdDrawIndexed")) return (PFN_vkVoidFunction)CmdDrawIndexed;
+ if (!strcmp(name, "CmdDrawIndirect")) return (PFN_vkVoidFunction)CmdDrawIndirect;
+ if (!strcmp(name, "CmdDrawIndexedIndirect")) return (PFN_vkVoidFunction)CmdDrawIndexedIndirect;
+ if (!strcmp(name, "CmdDispatch")) return (PFN_vkVoidFunction)CmdDispatch;
+ if (!strcmp(name, "CmdDispatchIndirect")) return (PFN_vkVoidFunction)CmdDispatchIndirect;
+ if (!strcmp(name, "CmdCopyBuffer")) return (PFN_vkVoidFunction)CmdCopyBuffer;
+ if (!strcmp(name, "CmdCopyImage")) return (PFN_vkVoidFunction)CmdCopyImage;
+ if (!strcmp(name, "CmdBlitImage")) return (PFN_vkVoidFunction)CmdBlitImage;
+ if (!strcmp(name, "CmdCopyBufferToImage")) return (PFN_vkVoidFunction)CmdCopyBufferToImage;
+ if (!strcmp(name, "CmdCopyImageToBuffer")) return (PFN_vkVoidFunction)CmdCopyImageToBuffer;
+ if (!strcmp(name, "CmdUpdateBuffer")) return (PFN_vkVoidFunction)CmdUpdateBuffer;
+ if (!strcmp(name, "CmdFillBuffer")) return (PFN_vkVoidFunction)CmdFillBuffer;
+ if (!strcmp(name, "CmdClearColorImage")) return (PFN_vkVoidFunction)CmdClearColorImage;
+ if (!strcmp(name, "CmdClearDepthStencilImage")) return (PFN_vkVoidFunction)CmdClearDepthStencilImage;
+ if (!strcmp(name, "CmdClearAttachments")) return (PFN_vkVoidFunction)CmdClearAttachments;
+ if (!strcmp(name, "CmdResolveImage")) return (PFN_vkVoidFunction)CmdResolveImage;
+ if (!strcmp(name, "CmdSetEvent")) return (PFN_vkVoidFunction)CmdSetEvent;
+ if (!strcmp(name, "CmdResetEvent")) return (PFN_vkVoidFunction)CmdResetEvent;
+ if (!strcmp(name, "CmdWaitEvents")) return (PFN_vkVoidFunction)CmdWaitEvents;
+ if (!strcmp(name, "CmdPipelineBarrier")) return (PFN_vkVoidFunction)CmdPipelineBarrier;
+ if (!strcmp(name, "CmdBeginQuery")) return (PFN_vkVoidFunction)CmdBeginQuery;
+ if (!strcmp(name, "CmdEndQuery")) return (PFN_vkVoidFunction)CmdEndQuery;
+ if (!strcmp(name, "CmdResetQueryPool")) return (PFN_vkVoidFunction)CmdResetQueryPool;
+ if (!strcmp(name, "CmdWriteTimestamp")) return (PFN_vkVoidFunction)CmdWriteTimestamp;
+ if (!strcmp(name, "CmdCopyQueryPoolResults")) return (PFN_vkVoidFunction)CmdCopyQueryPoolResults;
+ if (!strcmp(name, "CmdPushConstants")) return (PFN_vkVoidFunction)CmdPushConstants;
+ if (!strcmp(name, "CmdBeginRenderPass")) return (PFN_vkVoidFunction)CmdBeginRenderPass;
+ if (!strcmp(name, "CmdNextSubpass")) return (PFN_vkVoidFunction)CmdNextSubpass;
+ if (!strcmp(name, "CmdEndRenderPass")) return (PFN_vkVoidFunction)CmdEndRenderPass;
+ if (!strcmp(name, "CmdExecuteCommands")) return (PFN_vkVoidFunction)CmdExecuteCommands;
+ if (!strcmp(name, "DebugMarkerSetObjectTagEXT")) return (PFN_vkVoidFunction)DebugMarkerSetObjectTagEXT;
+ if (!strcmp(name, "DebugMarkerSetObjectNameEXT")) return (PFN_vkVoidFunction)DebugMarkerSetObjectNameEXT;
+ if (!strcmp(name, "CmdDebugMarkerBeginEXT")) return (PFN_vkVoidFunction)CmdDebugMarkerBeginEXT;
+ if (!strcmp(name, "CmdDebugMarkerEndEXT")) return (PFN_vkVoidFunction)CmdDebugMarkerEndEXT;
+ if (!strcmp(name, "CmdDebugMarkerInsertEXT")) return (PFN_vkVoidFunction)CmdDebugMarkerInsertEXT;
#ifdef VK_USE_PLATFORM_WIN32_KHR
- if (!strcmp(name, "GetMemoryWin32HandleNV"))
- return (PFN_vkVoidFunction)GetMemoryWin32HandleNV;
-#endif // VK_USE_PLATFORM_WIN32_KHR
- if (!strcmp(name, "CmdDrawIndirectCountAMD"))
- return (PFN_vkVoidFunction)CmdDrawIndirectCountAMD;
- if (!strcmp(name, "CmdDrawIndexedIndirectCountAMD"))
- return (PFN_vkVoidFunction)CmdDrawIndexedIndirectCountAMD;
+ if (!strcmp(name, "GetMemoryWin32HandleNV")) return (PFN_vkVoidFunction)GetMemoryWin32HandleNV;
+#endif // VK_USE_PLATFORM_WIN32_KHR
+ if (!strcmp(name, "CmdDrawIndirectCountAMD")) return (PFN_vkVoidFunction)CmdDrawIndirectCountAMD;
+ if (!strcmp(name, "CmdDrawIndexedIndirectCountAMD")) return (PFN_vkVoidFunction)CmdDrawIndexedIndirectCountAMD;
return NULL;
}
static inline PFN_vkVoidFunction InterceptCoreInstanceCommand(const char *name) {
- if (!name || name[0] != 'v' || name[1] != 'k')
- return NULL;
+ if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
name += 2;
- if (!strcmp(name, "CreateInstance"))
- return (PFN_vkVoidFunction)CreateInstance;
- if (!strcmp(name, "DestroyInstance"))
- return (PFN_vkVoidFunction)DestroyInstance;
- if (!strcmp(name, "EnumeratePhysicalDevices"))
- return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
- if (!strcmp(name, "_layerGetPhysicalDeviceProcAddr"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceProcAddr;
- if (!strcmp(name, "GetPhysicalDeviceFeatures"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
- if (!strcmp(name, "GetPhysicalDeviceFormatProperties"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
- if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
- if (!strcmp(name, "GetPhysicalDeviceProperties"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
- if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
- if (!strcmp(name, "GetPhysicalDeviceMemoryProperties"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
- if (!strcmp(name, "GetInstanceProcAddr"))
- return (PFN_vkVoidFunction)GetInstanceProcAddr;
- if (!strcmp(name, "CreateDevice"))
- return (PFN_vkVoidFunction)CreateDevice;
- if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
- if (!strcmp(name, "EnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
- if (!strcmp(name, "EnumerateDeviceLayerProperties"))
- return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
+ if (!strcmp(name, "CreateInstance")) return (PFN_vkVoidFunction)CreateInstance;
+ if (!strcmp(name, "DestroyInstance")) return (PFN_vkVoidFunction)DestroyInstance;
+ if (!strcmp(name, "EnumeratePhysicalDevices")) return (PFN_vkVoidFunction)EnumeratePhysicalDevices;
+ if (!strcmp(name, "_layerGetPhysicalDeviceProcAddr")) return (PFN_vkVoidFunction)GetPhysicalDeviceProcAddr;
+ if (!strcmp(name, "GetPhysicalDeviceFeatures")) return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures;
+ if (!strcmp(name, "GetPhysicalDeviceFormatProperties")) return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties;
+ if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties")) return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties;
+ if (!strcmp(name, "GetPhysicalDeviceProperties")) return (PFN_vkVoidFunction)GetPhysicalDeviceProperties;
+ if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties")) return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties;
+ if (!strcmp(name, "GetPhysicalDeviceMemoryProperties")) return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties;
+ if (!strcmp(name, "GetInstanceProcAddr")) return (PFN_vkVoidFunction)GetInstanceProcAddr;
+ if (!strcmp(name, "CreateDevice")) return (PFN_vkVoidFunction)CreateDevice;
+ if (!strcmp(name, "EnumerateInstanceExtensionProperties")) return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
+ if (!strcmp(name, "EnumerateInstanceLayerProperties")) return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
+ if (!strcmp(name, "EnumerateDeviceLayerProperties")) return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties"))
return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties;
if (!strcmp(name, "GetPhysicalDeviceExternalImageFormatPropertiesNV"))
@@ -4700,38 +4554,25 @@
if (device) {
layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
- if (!name || name[0] != 'v' || name[1] != 'k')
- return NULL;
+ if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
name += 2;
if (device_data->nvx_device_generated_commands_enabled) {
- if (!strcmp(name, "CmdProcessCommandsNVX"))
- return (PFN_vkVoidFunction)CmdProcessCommandsNVX;
- if (!strcmp(name, "CmdReserveSpaceForCommandsNVX"))
- return (PFN_vkVoidFunction)CmdReserveSpaceForCommandsNVX;
- if (!strcmp(name, "CreateIndirectCommandsLayoutNVX"))
- return (PFN_vkVoidFunction)CreateIndirectCommandsLayoutNVX;
- if (!strcmp(name, "DestroyIndirectCommandsLayoutNVX"))
- return (PFN_vkVoidFunction)DestroyIndirectCommandsLayoutNVX;
- if (!strcmp(name, "CreateObjectTableNVX"))
- return (PFN_vkVoidFunction)CreateObjectTableNVX;
- if (!strcmp(name, "DestroyObjectTableNVX"))
- return (PFN_vkVoidFunction)DestroyObjectTableNVX;
- if (!strcmp(name, "RegisterObjectsNVX"))
- return (PFN_vkVoidFunction)RegisterObjectsNVX;
- if (!strcmp(name, "UnregisterObjectsNVX"))
- return (PFN_vkVoidFunction)UnregisterObjectsNVX;
+ if (!strcmp(name, "CmdProcessCommandsNVX")) return (PFN_vkVoidFunction)CmdProcessCommandsNVX;
+ if (!strcmp(name, "CmdReserveSpaceForCommandsNVX")) return (PFN_vkVoidFunction)CmdReserveSpaceForCommandsNVX;
+ if (!strcmp(name, "CreateIndirectCommandsLayoutNVX")) return (PFN_vkVoidFunction)CreateIndirectCommandsLayoutNVX;
+ if (!strcmp(name, "DestroyIndirectCommandsLayoutNVX")) return (PFN_vkVoidFunction)DestroyIndirectCommandsLayoutNVX;
+ if (!strcmp(name, "CreateObjectTableNVX")) return (PFN_vkVoidFunction)CreateObjectTableNVX;
+ if (!strcmp(name, "DestroyObjectTableNVX")) return (PFN_vkVoidFunction)DestroyObjectTableNVX;
+ if (!strcmp(name, "RegisterObjectsNVX")) return (PFN_vkVoidFunction)RegisterObjectsNVX;
+ if (!strcmp(name, "UnregisterObjectsNVX")) return (PFN_vkVoidFunction)UnregisterObjectsNVX;
}
if (device_data->ext_display_control_enabled) {
- if (!strcmp(name, "DisplayPowerControlEXT"))
- return (PFN_vkVoidFunction)DisplayPowerControlEXT;
- if (!strcmp(name, "RegisterDeviceEventEXT"))
- return (PFN_vkVoidFunction)RegisterDeviceEventEXT;
- if (!strcmp(name, "RegisterDisplayEventEXT"))
- return (PFN_vkVoidFunction)RegisterDisplayEventEXT;
- if (!strcmp(name, "GetSwapchainCounterEXT"))
- return (PFN_vkVoidFunction)GetSwapchainCounterEXT;
+ if (!strcmp(name, "DisplayPowerControlEXT")) return (PFN_vkVoidFunction)DisplayPowerControlEXT;
+ if (!strcmp(name, "RegisterDeviceEventEXT")) return (PFN_vkVoidFunction)RegisterDeviceEventEXT;
+ if (!strcmp(name, "RegisterDisplayEventEXT")) return (PFN_vkVoidFunction)RegisterDisplayEventEXT;
+ if (!strcmp(name, "GetSwapchainCounterEXT")) return (PFN_vkVoidFunction)GetSwapchainCounterEXT;
}
}
@@ -4739,39 +4580,31 @@
}
static inline PFN_vkVoidFunction InterceptInstanceExtensionCommand(const char *name) {
- if (!name || name[0] != 'v' || name[1] != 'k')
- return NULL;
+ if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
name += 2;
// VK_KHR_get_physical_device_properties2 Extension
- if (!strcmp(name, "GetPhysicalDeviceFeatures2KHR"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures2KHR;
- if (!strcmp(name, "GetPhysicalDeviceProperties2KHR"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceProperties2KHR;
- if (!strcmp(name, "GetPhysicalDeviceFormatProperties2KHR"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties2KHR;
+ if (!strcmp(name, "GetPhysicalDeviceFeatures2KHR")) return (PFN_vkVoidFunction)GetPhysicalDeviceFeatures2KHR;
+ if (!strcmp(name, "GetPhysicalDeviceProperties2KHR")) return (PFN_vkVoidFunction)GetPhysicalDeviceProperties2KHR;
+ if (!strcmp(name, "GetPhysicalDeviceFormatProperties2KHR")) return (PFN_vkVoidFunction)GetPhysicalDeviceFormatProperties2KHR;
if (!strcmp(name, "GetPhysicalDeviceImageFormatProperties2KHR"))
return (PFN_vkVoidFunction)GetPhysicalDeviceImageFormatProperties2KHR;
if (!strcmp(name, "GetPhysicalDeviceQueueFamilyProperties2KHR"))
return (PFN_vkVoidFunction)GetPhysicalDeviceQueueFamilyProperties2KHR;
- if (!strcmp(name, "GetPhysicalDeviceMemoryProperties2KHR"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties2KHR;
+ if (!strcmp(name, "GetPhysicalDeviceMemoryProperties2KHR")) return (PFN_vkVoidFunction)GetPhysicalDeviceMemoryProperties2KHR;
if (!strcmp(name, "GetPhysicalDeviceSparseImageFormatProperties2KHR"))
return (PFN_vkVoidFunction)GetPhysicalDeviceSparseImageFormatProperties2KHR;
// VK_NVX_device_generated_commands Extension
if (!strcmp(name, "GetPhysicalDeviceGeneratedCommandsPropertiesNVX"))
return (PFN_vkVoidFunction)GetPhysicalDeviceGeneratedCommandsPropertiesNVX;
// VK_EXT_direct_mode_display Extension
- if (!strcmp(name, "ReleaseDisplayEXT"))
- return (PFN_vkVoidFunction)ReleaseDisplayEXT;
+ if (!strcmp(name, "ReleaseDisplayEXT")) return (PFN_vkVoidFunction)ReleaseDisplayEXT;
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
// VK_EXT_acquire_xlib_display Extension
- if (!strcmp(name, "AcquireXlibDisplayEXT"))
- return (PFN_vkVoidFunction)AcquireXlibDisplayEXT;
- if (!strcmp(name, "GetRandROutputDisplayEXT"))
- return (PFN_vkVoidFunction)GetRandROutputDisplayEXT;
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+ if (!strcmp(name, "AcquireXlibDisplayEXT")) return (PFN_vkVoidFunction)AcquireXlibDisplayEXT;
+ if (!strcmp(name, "GetRandROutputDisplayEXT")) return (PFN_vkVoidFunction)GetRandROutputDisplayEXT;
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
// VK_EXT_display_surface_counter Extension
if (!strcmp(name, "GetPhysicalDeviceSurfaceCapabilities2EXT"))
return (PFN_vkVoidFunction)GetPhysicalDeviceSurfaceCapabilities2EXT;
@@ -4784,16 +4617,11 @@
layer_data *device_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
if (device_data->wsi_enabled) {
- if (!strcmp("vkCreateSwapchainKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
- if (!strcmp("vkDestroySwapchainKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
- if (!strcmp("vkGetSwapchainImagesKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
- if (!strcmp("vkAcquireNextImageKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
- if (!strcmp("vkQueuePresentKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
+ if (!strcmp("vkCreateSwapchainKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(CreateSwapchainKHR);
+ if (!strcmp("vkDestroySwapchainKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(DestroySwapchainKHR);
+ if (!strcmp("vkGetSwapchainImagesKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(GetSwapchainImagesKHR);
+ if (!strcmp("vkAcquireNextImageKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(AcquireNextImageKHR);
+ if (!strcmp("vkQueuePresentKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(QueuePresentKHR);
}
if (device_data->wsi_display_swapchain_enabled) {
@@ -4811,8 +4639,7 @@
return reinterpret_cast<PFN_vkVoidFunction>(GetDisplayPlaneSupportedDisplaysKHR);
if (!strcmp("vkGetDisplayModePropertiesKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetDisplayModePropertiesKHR);
- if (!strcmp("vkCreateDisplayModeKHR", name))
- return reinterpret_cast<PFN_vkVoidFunction>(CreateDisplayModeKHR);
+ if (!strcmp("vkCreateDisplayModeKHR", name)) return reinterpret_cast<PFN_vkVoidFunction>(CreateDisplayModeKHR);
if (!strcmp("vkGetDisplayPlaneCapabilitiesKHR", name))
return reinterpret_cast<PFN_vkVoidFunction>(GetDisplayPlaneCapabilitiesKHR);
if (!strcmp("vkCreateDisplayPlaneSurfaceKHR", name))
@@ -4886,7 +4713,7 @@
return get_dispatch_table(ot_instance_table_map, instance)->GetPhysicalDeviceProcAddr(instance, funcName);
}
-} // namespace object_tracker
+} // namespace object_tracker
// vk_layer_logging.h expects these to be defined
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
diff --git a/layers/object_tracker.h b/layers/object_tracker.h
index 327c527..abbe3c2 100644
--- a/layers/object_tracker.h
+++ b/layers/object_tracker.h
@@ -32,36 +32,36 @@
// Object Tracker ERROR codes
enum OBJECT_TRACK_ERROR {
- OBJTRACK_NONE, // Used for INFO & other non-error messages
- OBJTRACK_UNKNOWN_OBJECT, // Updating uses of object that's not in global object list
- OBJTRACK_INTERNAL_ERROR, // Bug with data tracking within the layer
- OBJTRACK_OBJECT_LEAK, // OBJECT was not correctly freed/destroyed
- OBJTRACK_INVALID_OBJECT, // Object used that has never been created
- OBJTRACK_DESCRIPTOR_POOL_MISMATCH, // Descriptor Pools specified incorrectly
- OBJTRACK_COMMAND_POOL_MISMATCH, // Command Pools specified incorrectly
- OBJTRACK_ALLOCATOR_MISMATCH, // Created with custom allocator but destroyed without
+ OBJTRACK_NONE, // Used for INFO & other non-error messages
+ OBJTRACK_UNKNOWN_OBJECT, // Updating uses of object that's not in global object list
+ OBJTRACK_INTERNAL_ERROR, // Bug with data tracking within the layer
+ OBJTRACK_OBJECT_LEAK, // OBJECT was not correctly freed/destroyed
+ OBJTRACK_INVALID_OBJECT, // Object used that has never been created
+ OBJTRACK_DESCRIPTOR_POOL_MISMATCH, // Descriptor Pools specified incorrectly
+ OBJTRACK_COMMAND_POOL_MISMATCH, // Command Pools specified incorrectly
+ OBJTRACK_ALLOCATOR_MISMATCH, // Created with custom allocator but destroyed without
};
// Object Status -- used to track state of individual objects
typedef VkFlags ObjectStatusFlags;
enum ObjectStatusFlagBits {
- OBJSTATUS_NONE = 0x00000000, // No status is set
- OBJSTATUS_FENCE_IS_SUBMITTED = 0x00000001, // Fence has been submitted
- OBJSTATUS_VIEWPORT_BOUND = 0x00000002, // Viewport state object has been bound
- OBJSTATUS_RASTER_BOUND = 0x00000004, // Viewport state object has been bound
- OBJSTATUS_COLOR_BLEND_BOUND = 0x00000008, // Viewport state object has been bound
- OBJSTATUS_DEPTH_STENCIL_BOUND = 0x00000010, // Viewport state object has been bound
- OBJSTATUS_GPU_MEM_MAPPED = 0x00000020, // Memory object is currently mapped
- OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000040, // Command Buffer is of type SECONDARY
- OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000080, // Allocated with custom allocator
+ OBJSTATUS_NONE = 0x00000000, // No status is set
+ OBJSTATUS_FENCE_IS_SUBMITTED = 0x00000001, // Fence has been submitted
+ OBJSTATUS_VIEWPORT_BOUND = 0x00000002, // Viewport state object has been bound
+ OBJSTATUS_RASTER_BOUND = 0x00000004, // Viewport state object has been bound
+ OBJSTATUS_COLOR_BLEND_BOUND = 0x00000008, // Viewport state object has been bound
+ OBJSTATUS_DEPTH_STENCIL_BOUND = 0x00000010, // Viewport state object has been bound
+ OBJSTATUS_GPU_MEM_MAPPED = 0x00000020, // Memory object is currently mapped
+ OBJSTATUS_COMMAND_BUFFER_SECONDARY = 0x00000040, // Command Buffer is of type SECONDARY
+ OBJSTATUS_CUSTOM_ALLOCATOR = 0x00000080, // Allocated with custom allocator
};
// Object and state information structure
struct OBJTRACK_NODE {
- uint64_t handle; // Object handle (new)
- VkDebugReportObjectTypeEXT object_type; // Object type identifier
- ObjectStatusFlags status; // Object state
- uint64_t parent_object; // Parent object
+ uint64_t handle; // Object handle (new)
+ VkDebugReportObjectTypeEXT object_type; // Object type identifier
+ ObjectStatusFlags status; // Object state
+ uint64_t parent_object; // Parent object
};
// Track Queue information
@@ -119,10 +119,20 @@
VkLayerDispatchTable dispatch_table;
// Default constructor
layer_data()
- : instance(nullptr), physical_device(nullptr), num_objects{}, num_total_objects(0), report_data(nullptr),
- wsi_enabled(false), wsi_display_swapchain_enabled(false), wsi_display_extension_enabled(false),
- objtrack_extensions_enabled(false), num_tmp_callbacks(0), tmp_dbg_create_infos(nullptr), tmp_callbacks(nullptr),
- object_map{}, dispatch_table{} {
+ : instance(nullptr),
+ physical_device(nullptr),
+ num_objects{},
+ num_total_objects(0),
+ report_data(nullptr),
+ wsi_enabled(false),
+ wsi_display_swapchain_enabled(false),
+ wsi_display_extension_enabled(false),
+ objtrack_extensions_enabled(false),
+ num_tmp_callbacks(0),
+ tmp_dbg_create_infos(nullptr),
+ tmp_callbacks(nullptr),
+ object_map{},
+ dispatch_table{} {
object_map.resize(VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT + 1);
}
};
@@ -136,36 +146,36 @@
// Array of object name strings for OBJECT_TYPE enum conversion
static const char *object_name[VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT] = {
- "Unknown", // VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN
- "Instance", // VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT
- "Physical Device", // VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT
- "Device", // VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT
- "Queue", // VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT
- "Semaphore", // VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT
- "Command Buffer", // VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT
- "Fence", // VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT
- "Device Memory", // VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT
- "Buffer", // VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT
- "Image", // VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT
- "Event", // VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT
- "Query Pool", // VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT
- "Buffer View", // VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT
- "Image View", // VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT
- "Shader Module", // VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT
- "Pipeline Cache", // VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT
- "Pipeline Layout", // VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT
- "Render Pass", // VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT
- "Pipeline", // VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT
- "Descriptor Set Layout", // VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT
- "Sampler", // VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT
- "Descriptor Pool", // VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT
- "Descriptor Set", // VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT
- "Framebuffer", // VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT
- "Command Pool", // VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT
- "SurfaceKHR", // VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT
- "SwapchainKHR", // VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT
- "Debug Report"}; // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT
+ "Unknown", // VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN
+ "Instance", // VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT
+ "Physical Device", // VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT
+ "Device", // VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT
+ "Queue", // VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT
+ "Semaphore", // VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT
+ "Command Buffer", // VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT
+ "Fence", // VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT
+ "Device Memory", // VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT
+ "Buffer", // VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT
+ "Image", // VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT
+ "Event", // VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT
+ "Query Pool", // VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT
+ "Buffer View", // VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT
+ "Image View", // VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT
+ "Shader Module", // VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT
+ "Pipeline Cache", // VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT
+ "Pipeline Layout", // VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT
+ "Render Pass", // VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT
+ "Pipeline", // VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT
+ "Descriptor Set Layout", // VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT
+ "Sampler", // VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT
+ "Descriptor Pool", // VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT
+ "Descriptor Set", // VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT
+ "Framebuffer", // VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT
+ "Command Pool", // VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT
+ "SurfaceKHR", // VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT
+ "SwapchainKHR", // VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT
+ "Debug Report"}; // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT
#include "vk_dispatch_table_helper.h"
-} // namespace object_tracker
+} // namespace object_tracker
diff --git a/layers/parameter_name.h b/layers/parameter_name.h
index b788584..6459b5e 100644
--- a/layers/parameter_name.h
+++ b/layers/parameter_name.h
@@ -42,7 +42,7 @@
* validate_stype(ParameterName("pCreateInfo[%i].sType", IndexVector{ i }), pCreateInfo[i].sType);
*/
class ParameterName {
- public:
+ public:
/// Container for index values to be used with parameter name string formatting.
typedef std::vector<size_t> IndexVector;
@@ -50,7 +50,7 @@
/// one format specifier for each index value specified.
const std::string IndexFormatSpecifier = "%i";
- public:
+ public:
/**
* Construct a ParameterName object from a string literal, without formatting.
*
@@ -105,7 +105,7 @@
/// Retrive the formatted name string.
std::string get_name() const { return (args_.empty()) ? source_ : Format(); }
- private:
+ private:
/// Replace the %i format specifiers in the source string with the values from the index vector.
std::string Format() const {
std::string::size_type current = 0;
@@ -140,9 +140,9 @@
return (count == args_.size());
}
- private:
- std::string source_; ///< Format string.
- IndexVector args_; ///< Array index values for formatting.
+ private:
+ std::string source_; ///< Format string.
+ IndexVector args_; ///< Array index values for formatting.
};
-#endif // PARAMETER_NAME_H
+#endif // PARAMETER_NAME_H
diff --git a/layers/parameter_validation.cpp b/layers/parameter_validation.cpp
index 61949c6..6ee9665 100644
--- a/layers/parameter_validation.cpp
+++ b/layers/parameter_validation.cpp
@@ -101,7 +101,6 @@
static std::unordered_map<void *, instance_layer_data *> instance_layer_data_map;
static void init_parameter_validation(instance_layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
-
layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_parameter_validation");
}
@@ -1228,7 +1227,6 @@
if (result == VK_STRING_ERROR_NONE) {
return skip;
} else if (result & VK_STRING_ERROR_LENGTH) {
-
skip = log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
INVALID_USAGE, LayerName, "%s: string %s exceeds max length %d", apiName, stringName.get_name().c_str(),
MaxParamCheckerStringLength);
@@ -1278,8 +1276,9 @@
const auto &queue_data = device_data->queueFamilyIndexMap.find(indices[i]);
if (queue_data == device_data->queueFamilyIndexMap.end()) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
- LayerName, "%s: %s[%d] (%d) must be one of the indices specified when the device was "
- "created, via the VkDeviceQueueCreateInfo structure.",
+ LayerName,
+ "%s: %s[%d] (%d) must be one of the indices specified when the device was "
+ "created, via the VkDeviceQueueCreateInfo structure.",
function_name, parameter_name, i, indices[i]);
return false;
}
@@ -1565,7 +1564,6 @@
}
static void CheckInstanceRegisterExtensions(const VkInstanceCreateInfo *pCreateInfo, instance_layer_data *instance_data) {
-
for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
auto name = pCreateInfo->ppEnabledExtensionNames[i];
@@ -2503,11 +2501,11 @@
// If imageType is VK_IMAGE_TYPE_1D, both extent.height and extent.depth must be 1
if ((pCreateInfo->imageType == VK_IMAGE_TYPE_1D) && (pCreateInfo->extent.height != 1) && (pCreateInfo->extent.depth != 1)) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
- VALIDATION_ERROR_02129, LayerName, "vkCreateImage(): if pCreateInfo->imageType is VK_IMAGE_TYPE_1D, both "
- "pCreateInfo->extent.height and pCreateInfo->extent.depth must be 1. %s",
- validation_error_map[VALIDATION_ERROR_02129]);
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__,
+ VALIDATION_ERROR_02129, LayerName,
+ "vkCreateImage(): if pCreateInfo->imageType is VK_IMAGE_TYPE_1D, both "
+ "pCreateInfo->extent.height and pCreateInfo->extent.depth must be 1. %s",
+ validation_error_map[VALIDATION_ERROR_02129]);
}
if (pCreateInfo->imageType == VK_IMAGE_TYPE_2D) {
@@ -2620,46 +2618,52 @@
if ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D) || (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D)) {
if ((pCreateInfo->subresourceRange.layerCount != 1) &&
(pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
- LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD, "
- "pCreateInfo->subresourceRange.layerCount must be 1",
- ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D) ? 1 : 2));
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName,
+ "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD, "
+ "pCreateInfo->subresourceRange.layerCount must be 1",
+ ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D) ? 1 : 2));
}
} else if ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) ||
(pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)) {
if ((pCreateInfo->subresourceRange.layerCount < 1) &&
(pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
- LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD_ARRAY, "
- "pCreateInfo->subresourceRange.layerCount must be >= 1",
- ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) ? 1 : 2));
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName,
+ "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_%dD_ARRAY, "
+ "pCreateInfo->subresourceRange.layerCount must be >= 1",
+ ((pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY) ? 1 : 2));
}
} else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_CUBE) {
if ((pCreateInfo->subresourceRange.layerCount != 6) &&
(pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
- LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE, "
- "pCreateInfo->subresourceRange.layerCount must be 6");
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName,
+ "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE, "
+ "pCreateInfo->subresourceRange.layerCount must be 6");
}
} else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) {
if (((pCreateInfo->subresourceRange.layerCount == 0) || ((pCreateInfo->subresourceRange.layerCount % 6) != 0)) &&
(pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
- LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE_ARRAY, "
- "pCreateInfo->subresourceRange.layerCount must be a multiple of 6");
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName,
+ "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_CUBE_ARRAY, "
+ "pCreateInfo->subresourceRange.layerCount must be a multiple of 6");
}
} else if (pCreateInfo->viewType == VK_IMAGE_VIEW_TYPE_3D) {
if (pCreateInfo->subresourceRange.baseArrayLayer != 0) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
- LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_3D, "
- "pCreateInfo->subresourceRange.baseArrayLayer must be 0");
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName,
+ "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_3D, "
+ "pCreateInfo->subresourceRange.baseArrayLayer must be 0");
}
if ((pCreateInfo->subresourceRange.layerCount != 1) &&
(pCreateInfo->subresourceRange.layerCount != VK_REMAINING_ARRAY_LAYERS)) {
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1,
- LayerName, "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_3D, "
- "pCreateInfo->subresourceRange.layerCount must be 1");
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, (VkDebugReportObjectTypeEXT)0, 0, __LINE__, 1, LayerName,
+ "vkCreateImageView: if pCreateInfo->viewType is VK_IMAGE_TYPE_3D, "
+ "pCreateInfo->subresourceRange.layerCount must be 1");
}
}
}
@@ -3433,12 +3437,12 @@
for (uint32_t descriptor_index = 0; descriptor_index < pCreateInfo->pBindings[i].descriptorCount;
++descriptor_index) {
if (pCreateInfo->pBindings[i].pImmutableSamplers[descriptor_index] == VK_NULL_HANDLE) {
- skip |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
- __LINE__, REQUIRED_PARAMETER, LayerName, "vkCreateDescriptorSetLayout: required parameter "
- "pCreateInfo->pBindings[%d].pImmutableSamplers[%d]"
- " specified as VK_NULL_HANDLE",
- i, descriptor_index);
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0,
+ __LINE__, REQUIRED_PARAMETER, LayerName,
+ "vkCreateDescriptorSetLayout: required parameter "
+ "pCreateInfo->pBindings[%d].pImmutableSamplers[%d]"
+ " specified as VK_NULL_HANDLE",
+ i, descriptor_index);
}
}
}
@@ -4721,7 +4725,6 @@
bool PostCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool,
uint32_t slot) {
-
ValidateEnumerator(pipelineStage);
return true;
@@ -4834,8 +4837,7 @@
VKAPI_ATTR VkResult VKAPI_CALL EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName,
uint32_t *pCount, VkExtensionProperties *pProperties) {
/* parameter_validation does not have any physical device extensions */
- if (pLayerName && !strcmp(pLayerName, global_layer.layerName))
- return util_GetExtensionProperties(0, NULL, pCount, pProperties);
+ if (pLayerName && !strcmp(pLayerName, global_layer.layerName)) return util_GetExtensionProperties(0, NULL, pCount, pProperties);
assert(physicalDevice);
@@ -5112,7 +5114,7 @@
return result;
}
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
@@ -5159,7 +5161,7 @@
return result;
}
-#endif // VK_USE_PLATFORM_XCB_KHR
+#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
@@ -5205,7 +5207,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_XLIB_KHR
+#endif // VK_USE_PLATFORM_XLIB_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
@@ -5249,7 +5251,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_MIR_KHR
+#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
@@ -5294,7 +5296,7 @@
return result;
}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_ANDROID_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateAndroidSurfaceKHR(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR *pCreateInfo,
@@ -5318,7 +5320,7 @@
return result;
}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+#endif // VK_USE_PLATFORM_ANDROID_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateSharedSwapchainsKHR(VkDevice device, uint32_t swapchainCount,
const VkSwapchainCreateInfoKHR *pCreateInfos,
@@ -5645,7 +5647,6 @@
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
VKAPI_ATTR VkResult VKAPI_CALL AcquireXlibDisplayEXT(VkPhysicalDevice physicalDevice, Display *dpy, VkDisplayKHR display) {
-
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
auto my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), instance_layer_data_map);
assert(my_data != NULL);
@@ -5662,7 +5663,6 @@
VKAPI_ATTR VkResult VKAPI_CALL GetRandROutputDisplayEXT(VkPhysicalDevice physicalDevice, Display *dpy, RROutput rrOutput,
VkDisplayKHR *pDisplay) {
-
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
auto my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), instance_layer_data_map);
assert(my_data != NULL);
@@ -5676,7 +5676,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#endif // VK_USE_PLATFORM_XLIB_XRANDR_EXT
// Definitions for the VK_EXT_debug_marker Extension
@@ -5759,14 +5759,13 @@
// Definitions for the VK_EXT_direct_mode_display extension
VKAPI_ATTR VkResult VKAPI_CALL ReleaseDisplayEXT(VkPhysicalDevice physicalDevice, VkDisplayKHR display) {
-
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
auto my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), instance_layer_data_map);
assert(my_data != NULL);
bool skip = false;
skip |= require_instance_extension(physicalDevice, &instance_extension_enables::ext_direct_mode_display_enabled,
"vkReleaseDisplayEXT", VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME);
-#if 0 // Validation not automatically generated
+#if 0 // Validation not automatically generated
skip |= parameter_validation_vkReleaseDisplayEXT(my_data->report_data, display);
#endif
if (!skip) {
@@ -5780,7 +5779,6 @@
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceSurfaceCapabilities2EXT(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
VkSurfaceCapabilities2EXT *pSurfaceCapabilities) {
-
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
auto my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), instance_layer_data_map);
assert(my_data != NULL);
@@ -5801,7 +5799,6 @@
VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage,
VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType,
VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties) {
-
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
bool skip = false;
auto my_data = get_my_data_ptr(get_dispatch_key(physicalDevice), instance_layer_data_map);
@@ -5829,7 +5826,6 @@
#ifdef VK_USE_PLATFORM_WIN32_KHR
VKAPI_ATTR VkResult VKAPI_CALL GetMemoryWin32HandleNV(VkDevice device, VkDeviceMemory memory,
VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE *pHandle) {
-
VkResult result = VK_ERROR_VALIDATION_FAILED_EXT;
bool skip = false;
layer_data *my_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
@@ -5846,7 +5842,7 @@
return result;
}
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
// VK_NVX_device_generated_commands Extension
@@ -5902,7 +5898,7 @@
assert(my_data != NULL);
skip |= require_device_extension(my_data, my_data->enables.nvx_device_generated_commands, "vkDestroyIndirectCommandsLayoutNVX",
VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
-#if 0 // Validation not automatically generated
+#if 0 // Validation not automatically generated
skip |= parameter_validation_vkDestroyIndirectCommandsLayoutNVX(my_data->report_data, indirectCommandsLayout, pAllocator);
#endif
if (!skip) {
@@ -5933,7 +5929,7 @@
assert(my_data != NULL);
skip |= require_device_extension(my_data, my_data->enables.nvx_device_generated_commands, "vkDestroyObjectTableNVX",
VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME);
-#if 0 // Validation not automatically generated
+#if 0 // Validation not automatically generated
skip |= parameter_validation_vkDestroyObjectTableNVX(my_data->report_data, objectTable, pAllocator);
#endif
if (!skip) {
@@ -6010,49 +6006,38 @@
}
PFN_vkVoidFunction proc = intercept_core_device_command(funcName);
- if (proc)
- return proc;
+ if (proc) return proc;
proc = InterceptWsiEnabledCommand(funcName, device);
- if (proc)
- return proc;
+ if (proc) return proc;
proc = intercept_extension_device_command(funcName, device);
- if (proc)
- return proc;
+ if (proc) return proc;
- if (!data->dispatch_table.GetDeviceProcAddr)
- return nullptr;
+ if (!data->dispatch_table.GetDeviceProcAddr) return nullptr;
return data->dispatch_table.GetDeviceProcAddr(device, funcName);
}
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
PFN_vkVoidFunction proc = intercept_core_instance_command(funcName);
- if (!proc)
- proc = intercept_core_device_command(funcName);
+ if (!proc) proc = intercept_core_device_command(funcName);
- if (!proc)
- proc = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
+ if (!proc) proc = InterceptWsiEnabledCommand(funcName, VkDevice(VK_NULL_HANDLE));
- if (proc)
- return proc;
+ if (proc) return proc;
assert(instance);
auto data = get_my_data_ptr(get_dispatch_key(instance), instance_layer_data_map);
proc = debug_report_get_instance_proc_addr(data->report_data, funcName);
- if (!proc)
- proc = InterceptWsiEnabledCommand(funcName, instance);
+ if (!proc) proc = InterceptWsiEnabledCommand(funcName, instance);
- if (!proc)
- proc = intercept_extension_instance_command(funcName, instance);
+ if (!proc) proc = intercept_extension_instance_command(funcName, instance);
- if (proc)
- return proc;
+ if (proc) return proc;
- if (!data->dispatch_table.GetInstanceProcAddr)
- return nullptr;
+ if (!data->dispatch_table.GetInstanceProcAddr) return nullptr;
return data->dispatch_table.GetInstanceProcAddr(instance, funcName);
}
@@ -6060,8 +6045,7 @@
assert(instance);
auto data = get_my_data_ptr(get_dispatch_key(instance), instance_layer_data_map);
- if (!data->dispatch_table.GetPhysicalDeviceProcAddr)
- return nullptr;
+ if (!data->dispatch_table.GetPhysicalDeviceProcAddr) return nullptr;
return data->dispatch_table.GetPhysicalDeviceProcAddr(instance, funcName);
}
@@ -6096,8 +6080,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_instance_commands); i++) {
- if (!strcmp(core_instance_commands[i].name, name))
- return core_instance_commands[i].proc;
+ if (!strcmp(core_instance_commands[i].name, name)) return core_instance_commands[i].proc;
}
return nullptr;
@@ -6231,7 +6214,7 @@
{"vkCmdDebugMarkerInsertEXT", reinterpret_cast<PFN_vkVoidFunction>(CmdDebugMarkerInsertEXT)},
#ifdef VK_USE_PLATFORM_WIN32_KHR
{"vkGetMemoryWin32HandleNV", reinterpret_cast<PFN_vkVoidFunction>(GetMemoryWin32HandleNV)},
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
// NVX_device_generated_commands
{"vkCmdProcessCommandsNVX", reinterpret_cast<PFN_vkVoidFunction>(CmdProcessCommandsNVX)},
{"vkCmdReserveSpaceForCommandsNVX", reinterpret_cast<PFN_vkVoidFunction>(CmdReserveSpaceForCommandsNVX)},
@@ -6244,8 +6227,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_device_commands); i++) {
- if (!strcmp(core_device_commands[i].name, name))
- return core_device_commands[i].proc;
+ if (!strcmp(core_device_commands[i].name, name)) return core_device_commands[i].proc;
}
return nullptr;
@@ -6266,8 +6248,7 @@
if (device) {
for (size_t i = 0; i < ARRAY_SIZE(wsi_device_commands); i++) {
- if (!strcmp(wsi_device_commands[i].name, name))
- return wsi_device_commands[i].proc;
+ if (!strcmp(wsi_device_commands[i].name, name)) return wsi_device_commands[i].proc;
}
if (!strcmp("vkCreateSharedSwapchainsKHR", name)) {
@@ -6329,8 +6310,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(wsi_instance_commands); i++) {
- if (!strcmp(wsi_instance_commands[i].name, name))
- return wsi_instance_commands[i].proc;
+ if (!strcmp(wsi_instance_commands[i].name, name)) return wsi_instance_commands[i].proc;
}
return nullptr;
@@ -6360,8 +6340,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(extension_instance_commands); i++) {
- if (!strcmp(extension_instance_commands[i].name, name))
- return extension_instance_commands[i].proc;
+ if (!strcmp(extension_instance_commands[i].name, name)) return extension_instance_commands[i].proc;
}
return nullptr;
@@ -6377,7 +6356,7 @@
#ifdef VK_USE_PLATFORM_WIN32_KHR
// NV_external_memory_win32
{"vkGetMemoryWin32HandleNV", reinterpret_cast<PFN_vkVoidFunction>(GetMemoryWin32HandleNV)},
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
// EXT_debug_marker
{"vkDebugMarkerSetObjectTagEXT", reinterpret_cast<PFN_vkVoidFunction>(DebugMarkerSetObjectTagEXT)},
{"vkDebugMarkerSetObjectNameEXT", reinterpret_cast<PFN_vkVoidFunction>(DebugMarkerSetObjectNameEXT)},
@@ -6396,15 +6375,14 @@
if (device) {
for (size_t i = 0; i < ARRAY_SIZE(extension_device_commands); i++) {
- if (!strcmp(extension_device_commands[i].name, name))
- return extension_device_commands[i].proc;
+ if (!strcmp(extension_device_commands[i].name, name)) return extension_device_commands[i].proc;
}
}
return nullptr;
}
-} // namespace parameter_validation
+} // namespace parameter_validation
// vk_layer_logging.h expects these to be defined
diff --git a/layers/parameter_validation_utils.h b/layers/parameter_validation_utils.h
index 3cc85c0..e3197b6 100644
--- a/layers/parameter_validation_utils.h
+++ b/layers/parameter_validation_utils.h
@@ -35,32 +35,32 @@
namespace parameter_validation {
enum ErrorCode {
- NONE, // Used for INFO & other non-error messages
- INVALID_USAGE, // The value of a parameter is not consistent
- // with the valid usage criteria defined in
- // the Vulkan specification.
- INVALID_STRUCT_STYPE, // The sType field of a Vulkan structure does
- // not contain the value expected for a structure
- // of that type.
- INVALID_STRUCT_PNEXT, // The pNext field of a Vulkan structure references
- // a value that is not compatible with a structure of
- // that type or is not NULL when a structure of that
- // type has no compatible pNext values.
- REQUIRED_PARAMETER, // A required parameter was specified as 0 or NULL.
- RESERVED_PARAMETER, // A parameter reserved for future use was not
- // specified as 0 or NULL.
- UNRECOGNIZED_VALUE, // A Vulkan enumeration, VkFlags, or VkBool32 parameter
- // contains a value that is not recognized as valid for
- // that type.
- DEVICE_LIMIT, // A specified parameter exceeds the limits returned
- // by the physical device
- DEVICE_FEATURE, // Use of a requested feature is not supported by
- // the device
- FAILURE_RETURN_CODE, // A Vulkan return code indicating a failure condition
- // was encountered.
- EXTENSION_NOT_ENABLED, // An extension entrypoint was called, but the required
- // extension was not enabled at CreateInstance or
- // CreateDevice time.
+ NONE, // Used for INFO & other non-error messages
+ INVALID_USAGE, // The value of a parameter is not consistent
+ // with the valid usage criteria defined in
+ // the Vulkan specification.
+ INVALID_STRUCT_STYPE, // The sType field of a Vulkan structure does
+ // not contain the value expected for a structure
+ // of that type.
+ INVALID_STRUCT_PNEXT, // The pNext field of a Vulkan structure references
+ // a value that is not compatible with a structure of
+ // that type or is not NULL when a structure of that
+ // type has no compatible pNext values.
+ REQUIRED_PARAMETER, // A required parameter was specified as 0 or NULL.
+ RESERVED_PARAMETER, // A parameter reserved for future use was not
+ // specified as 0 or NULL.
+ UNRECOGNIZED_VALUE, // A Vulkan enumeration, VkFlags, or VkBool32 parameter
+ // contains a value that is not recognized as valid for
+ // that type.
+ DEVICE_LIMIT, // A specified parameter exceeds the limits returned
+ // by the physical device
+ DEVICE_FEATURE, // Use of a requested feature is not supported by
+ // the device
+ FAILURE_RETURN_CODE, // A Vulkan return code indicating a failure condition
+ // was encountered.
+ EXTENSION_NOT_ENABLED, // An extension entrypoint was called, but the required
+ // extension was not enabled at CreateInstance or
+ // CreateDevice time.
};
struct GenericHeader {
@@ -99,14 +99,16 @@
// See Appendix C.10 "Assigning Extension Token Values" from the Vulkan specification
const uint32_t ExtEnumBaseValue = 1000000000;
-template <typename T> bool is_extension_added_token(T value) {
+template <typename T>
+bool is_extension_added_token(T value) {
return (static_cast<uint32_t>(std::abs(static_cast<int32_t>(value))) >= ExtEnumBaseValue);
}
// VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE token is a special case that was converted from a core token to an
// extension added token. Its original value was intentionally preserved after the conversion, so it does not use
// the base value that other extension added tokens use, and it does not fall within the enum's begin/end range.
-template <> bool is_extension_added_token(VkSamplerAddressMode value) {
+template <>
+bool is_extension_added_token(VkSamplerAddressMode value) {
bool result = (static_cast<uint32_t>(std::abs(static_cast<int32_t>(value))) >= ExtEnumBaseValue);
return (result || (value == VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE));
}
@@ -152,7 +154,6 @@
bool skip_call = false;
if (value == NULL) {
-
skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
REQUIRED_PARAMETER, LayerName, "%s: required parameter %s specified as NULL", apiName,
parameterName.get_name().c_str());
@@ -486,10 +487,11 @@
const char *allowed_struct_names, const void *next, size_t allowed_type_count,
const VkStructureType *allowed_types, uint32_t header_version) {
bool skip_call = false;
- const char disclaimer[] = "This warning is based on the Valid Usage documentation for version %d of the Vulkan header. It "
- "is possible that you are using a struct from a private extension or an extension that was added "
- "to a later version of the Vulkan header, in which case your use of %s is perfectly valid but "
- "is not guaranteed to work correctly with validation enabled";
+ const char disclaimer[] =
+ "This warning is based on the Valid Usage documentation for version %d of the Vulkan header. It "
+ "is possible that you are using a struct from a private extension or an extension that was added "
+ "to a later version of the Vulkan header, in which case your use of %s is perfectly valid but "
+ "is not guaranteed to work correctly with validation enabled";
if (next != NULL) {
if (allowed_type_count == 0) {
@@ -508,8 +510,9 @@
std::string type_name = string_VkStructureType(current->sType);
if (type_name == UnsupportedStructureTypeString) {
- std::string message = "%s: %s chain includes a structure with unexpected VkStructureType (%d); Allowed "
- "structures are [%s]. ";
+ std::string message =
+ "%s: %s chain includes a structure with unexpected VkStructureType (%d); Allowed "
+ "structures are [%s]. ";
message += disclaimer;
skip_call |= log_msg(report_data, VK_DEBUG_REPORT_WARNING_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
0, __LINE__, INVALID_STRUCT_PNEXT, LayerName, message.c_str(), api_name,
@@ -583,11 +586,11 @@
bool skip_call = false;
if (((value < begin) || (value > end)) && !is_extension_added_token(value)) {
- skip_call |=
- log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
- UNRECOGNIZED_VALUE, LayerName, "%s: value of %s (%d) does not fall within the begin..end range of the core %s "
- "enumeration tokens and is not an extension added token",
- apiName, parameterName.get_name().c_str(), value, enumName);
+ skip_call |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, 0, __LINE__,
+ UNRECOGNIZED_VALUE, LayerName,
+ "%s: value of %s (%d) does not fall within the begin..end range of the core %s "
+ "enumeration tokens and is not an extension added token",
+ apiName, parameterName.get_name().c_str(), value, enumName);
}
return skip_call;
@@ -820,6 +823,6 @@
}
}
-} // namespace parameter_validation
+} // namespace parameter_validation
-#endif // PARAMETER_VALIDATION_UTILS_H
+#endif // PARAMETER_VALIDATION_UTILS_H
diff --git a/layers/swapchain.cpp b/layers/swapchain.cpp
index 29ec8b4..cd7f601 100644
--- a/layers/swapchain.cpp
+++ b/layers/swapchain.cpp
@@ -88,7 +88,6 @@
// vkEnumerateInstanceExtensionProperties(), since the loader handles that.
for (i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
if (strcmp(pCreateInfo->ppEnabledExtensionNames[i], VK_KHR_DISPLAY_EXTENSION_NAME) == 0) {
-
my_data->instanceMap[instance].displayExtensionEnabled = true;
}
}
@@ -96,7 +95,6 @@
#include "vk_dispatch_table_helper.h"
static void init_swapchain(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
-
layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "lunarg_swapchain");
}
@@ -201,7 +199,6 @@
// Delete all of the SwpPhysicalDevice's, SwpSurface's, and the
// SwpInstance associated with this instance:
for (auto it = pInstance->physicalDevices.begin(); it != pInstance->physicalDevices.end(); it++) {
-
// Free memory that was allocated for/by this SwpPhysicalDevice:
SwpPhysicalDevice *pPhysicalDevice = it->second;
if (pPhysicalDevice) {
@@ -219,7 +216,6 @@
my_data->physicalDeviceMap.erase(it->second->physicalDevice);
}
for (auto it = pInstance->surfaces.begin(); it != pInstance->surfaces.end(); it++) {
-
// Free memory that was allocated for/by this SwpPhysicalDevice:
SwpSurface *pSurface = it->second;
if (pSurface) {
@@ -320,7 +316,7 @@
}
return VK_ERROR_VALIDATION_FAILED_EXT;
}
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateMirSurfaceKHR(VkInstance instance, const VkMirSurfaceCreateInfoKHR *pCreateInfo,
@@ -389,7 +385,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_MIR_KHR
+#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
@@ -459,7 +455,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo,
@@ -527,7 +523,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo,
@@ -597,7 +593,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_XCB_KHR
+#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
VKAPI_ATTR VkResult VKAPI_CALL CreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo,
@@ -667,7 +663,7 @@
}
return result;
}
-#endif // VK_USE_PLATFORM_XLIB_KHR
+#endif // VK_USE_PLATFORM_XLIB_KHR
VKAPI_ATTR VkResult VKAPI_CALL GetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount,
VkDisplayPlanePropertiesKHR *pProperties) {
@@ -1320,8 +1316,7 @@
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetDeviceProcAddr(VkDevice device, const char *funcName) {
PFN_vkVoidFunction proc = intercept_core_device_command(funcName);
- if (proc)
- return proc;
+ if (proc) return proc;
assert(device);
@@ -1331,22 +1326,17 @@
VkLayerDispatchTable *pDisp = my_data->device_dispatch_table;
proc = intercept_khr_swapchain_command(funcName, device);
- if (proc)
- return proc;
+ if (proc) return proc;
- if (pDisp->GetDeviceProcAddr == NULL)
- return NULL;
+ if (pDisp->GetDeviceProcAddr == NULL) return NULL;
return pDisp->GetDeviceProcAddr(device, funcName);
}
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetInstanceProcAddr(VkInstance instance, const char *funcName) {
PFN_vkVoidFunction proc = intercept_core_instance_command(funcName);
- if (!proc)
- proc = intercept_core_device_command(funcName);
- if (!proc)
- proc = intercept_khr_swapchain_command(funcName, VK_NULL_HANDLE);
- if (proc)
- return proc;
+ if (!proc) proc = intercept_core_device_command(funcName);
+ if (!proc) proc = intercept_khr_swapchain_command(funcName, VK_NULL_HANDLE);
+ if (proc) return proc;
assert(instance);
@@ -1355,13 +1345,10 @@
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
proc = debug_report_get_instance_proc_addr(my_data->report_data, funcName);
- if (!proc)
- proc = intercept_khr_surface_command(funcName, instance);
- if (proc)
- return proc;
+ if (!proc) proc = intercept_khr_surface_command(funcName, instance);
+ if (proc) return proc;
- if (pTable->GetInstanceProcAddr == NULL)
- return NULL;
+ if (pTable->GetInstanceProcAddr == NULL) return NULL;
return pTable->GetInstanceProcAddr(instance, funcName);
}
@@ -1372,8 +1359,7 @@
my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
- if (pTable->GetPhysicalDeviceProcAddr == NULL)
- return NULL;
+ if (pTable->GetPhysicalDeviceProcAddr == NULL) return NULL;
return pTable->GetPhysicalDeviceProcAddr(instance, funcName);
}
@@ -1396,8 +1382,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_instance_commands); i++) {
- if (!strcmp(core_instance_commands[i].name, name))
- return core_instance_commands[i].proc;
+ if (!strcmp(core_instance_commands[i].name, name)) return core_instance_commands[i].proc;
}
return nullptr;
@@ -1410,32 +1395,32 @@
} khr_surface_commands[] = {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
{"vkCreateAndroidSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateAndroidSurfaceKHR)},
-#endif // VK_USE_PLATFORM_ANDROID_KHR
+#endif // VK_USE_PLATFORM_ANDROID_KHR
#ifdef VK_USE_PLATFORM_MIR_KHR
{"vkCreateMirSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateMirSurfaceKHR)},
{"vkGetPhysicalDeviceMirPresentationSupportKHR",
reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceMirPresentationSupportKHR)},
-#endif // VK_USE_PLATFORM_MIR_KHR
+#endif // VK_USE_PLATFORM_MIR_KHR
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
{"vkCreateWaylandSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateWaylandSurfaceKHR)},
{"vkGetPhysicalDeviceWaylandPresentationSupportKHR",
reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWaylandPresentationSupportKHR)},
-#endif // VK_USE_PLATFORM_WAYLAND_KHR
+#endif // VK_USE_PLATFORM_WAYLAND_KHR
#ifdef VK_USE_PLATFORM_WIN32_KHR
{"vkCreateWin32SurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateWin32SurfaceKHR)},
{"vkGetPhysicalDeviceWin32PresentationSupportKHR",
reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceWin32PresentationSupportKHR)},
-#endif // VK_USE_PLATFORM_WIN32_KHR
+#endif // VK_USE_PLATFORM_WIN32_KHR
#ifdef VK_USE_PLATFORM_XCB_KHR
{"vkCreateXcbSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateXcbSurfaceKHR)},
{"vkGetPhysicalDeviceXcbPresentationSupportKHR",
reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXcbPresentationSupportKHR)},
-#endif // VK_USE_PLATFORM_XCB_KHR
+#endif // VK_USE_PLATFORM_XCB_KHR
#ifdef VK_USE_PLATFORM_XLIB_KHR
{"vkCreateXlibSurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(CreateXlibSurfaceKHR)},
{"vkGetPhysicalDeviceXlibPresentationSupportKHR",
reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceXlibPresentationSupportKHR)},
-#endif // VK_USE_PLATFORM_XLIB_KHR
+#endif // VK_USE_PLATFORM_XLIB_KHR
{"vkDestroySurfaceKHR", reinterpret_cast<PFN_vkVoidFunction>(DestroySurfaceKHR)},
{"vkGetPhysicalDeviceSurfaceSupportKHR", reinterpret_cast<PFN_vkVoidFunction>(GetPhysicalDeviceSurfaceSupportKHR)},
{"vkGetPhysicalDeviceDisplayPlanePropertiesKHR",
@@ -1448,8 +1433,7 @@
// do not check if VK_KHR_*_surface is enabled (why?)
for (size_t i = 0; i < ARRAY_SIZE(khr_surface_commands); i++) {
- if (!strcmp(khr_surface_commands[i].name, name))
- return khr_surface_commands[i].proc;
+ if (!strcmp(khr_surface_commands[i].name, name)) return khr_surface_commands[i].proc;
}
return nullptr;
@@ -1466,8 +1450,7 @@
};
for (size_t i = 0; i < ARRAY_SIZE(core_device_commands); i++) {
- if (!strcmp(core_device_commands[i].name, name))
- return core_device_commands[i].proc;
+ if (!strcmp(core_device_commands[i].name, name)) return core_device_commands[i].proc;
}
return nullptr;
@@ -1486,14 +1469,13 @@
// do not check if VK_KHR_swapchain is enabled (why?)
for (size_t i = 0; i < ARRAY_SIZE(khr_swapchain_commands); i++) {
- if (!strcmp(khr_swapchain_commands[i].name, name))
- return khr_swapchain_commands[i].proc;
+ if (!strcmp(khr_swapchain_commands[i].name, name)) return khr_swapchain_commands[i].proc;
}
return nullptr;
}
-} // namespace swapchain
+} // namespace swapchain
// vk_layer_logging.h expects these to be defined
diff --git a/layers/swapchain.h b/layers/swapchain.h
index 2346d1e..d2913aa 100644
--- a/layers/swapchain.h
+++ b/layers/swapchain.h
@@ -30,42 +30,44 @@
// Swapchain ERROR codes
enum SWAPCHAIN_ERROR {
- SWAPCHAIN_INVALID_HANDLE, // Handle used that isn't currently valid
- SWAPCHAIN_NULL_POINTER, // Pointer set to NULL, instead of being a valid pointer
- SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED, // Did not enable WSI extension, but called WSI function
- SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN, // Called vkDestroyDevice() before vkDestroySwapchainKHR()
- SWAPCHAIN_CREATE_UNSUPPORTED_SURFACE, // Called vkCreateSwapchainKHR() with a pCreateInfo->surface that wasn't seen as supported
- // by vkGetPhysicalDeviceSurfaceSupportKHR for the device
- SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY, // Called vkCreateSwapchainKHR() without calling a query (e.g.
- // vkGetPhysicalDeviceSurfaceCapabilitiesKHR())
- SWAPCHAIN_CREATE_SWAP_OUT_OF_BOUNDS_EXTENTS, // Called vkCreateSwapchainKHR() with out-of-bounds imageExtent
- SWAPCHAIN_CREATE_SWAP_EXTENTS_NO_MATCH_WIN, // Called vkCreateSwapchainKHR() with imageExtent that doesn't match window's extent
- SWAPCHAIN_CREATE_SWAP_BAD_PRE_TRANSFORM, // Called vkCreateSwapchainKHR() with a non-supported preTransform
- SWAPCHAIN_CREATE_SWAP_BAD_COMPOSITE_ALPHA, // Called vkCreateSwapchainKHR() with a non-supported compositeAlpha
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_ARRAY_LAYERS, // Called vkCreateSwapchainKHR() with a non-supported imageArrayLayers
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_USAGE_FLAGS, // Called vkCreateSwapchainKHR() with a non-supported imageUsageFlags
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_COLOR_SPACE, // Called vkCreateSwapchainKHR() with a non-supported imageColorSpace
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_FORMAT, // Called vkCreateSwapchainKHR() with a non-supported imageFormat
- SWAPCHAIN_CREATE_SWAP_BAD_IMG_FMT_CLR_SP, // Called vkCreateSwapchainKHR() with a non-supported imageColorSpace
- SWAPCHAIN_CREATE_SWAP_BAD_PRESENT_MODE, // Called vkCreateSwapchainKHR() with a non-supported presentMode
- SWAPCHAIN_CREATE_SWAP_BAD_SHARING_MODE, // Called vkCreateSwapchainKHR() with a non-supported imageSharingMode
- SWAPCHAIN_CREATE_SWAP_BAD_SHARING_VALUES, // Called vkCreateSwapchainKHR() with bad values when imageSharingMode is
- // VK_SHARING_MODE_CONCURRENT
- SWAPCHAIN_BAD_BOOL, // VkBool32 that doesn't have value of VK_TRUE or VK_FALSE (e.g. is a non-zero form of true)
- SWAPCHAIN_PRIOR_COUNT, // Query must be called first to get value of pCount, then called second time
- SWAPCHAIN_INVALID_COUNT, // Second time a query called, the pCount value didn't match first time
- SWAPCHAIN_WRONG_STYPE, // The sType for a struct has the wrong value
- SWAPCHAIN_WRONG_NEXT, // The pNext for a struct is not NULL
- SWAPCHAIN_ZERO_VALUE, // A value should be non-zero
- SWAPCHAIN_DID_NOT_QUERY_QUEUE_FAMILIES, // A function using a queueFamilyIndex was called before
- // vkGetPhysicalDeviceQueueFamilyProperties() was called
- SWAPCHAIN_QUEUE_FAMILY_INDEX_TOO_LARGE, // A queueFamilyIndex value is not less than pQueueFamilyPropertyCount returned by
- // vkGetPhysicalDeviceQueueFamilyProperties()
- SWAPCHAIN_SURFACE_NOT_SUPPORTED_WITH_QUEUE, // A surface is not supported by a given queueFamilyIndex, as seen by
- // vkGetPhysicalDeviceSurfaceSupportKHR()
- SWAPCHAIN_GET_SUPPORTED_DISPLAYS_WITHOUT_QUERY, // vkGetDisplayPlaneSupportedDisplaysKHR should be called after querying
- // device display plane properties
- SWAPCHAIN_PLANE_INDEX_TOO_LARGE, // a planeIndex value is larger than what vkGetDisplayPlaneSupportedDisplaysKHR returns
+ SWAPCHAIN_INVALID_HANDLE, // Handle used that isn't currently valid
+ SWAPCHAIN_NULL_POINTER, // Pointer set to NULL, instead of being a valid pointer
+ SWAPCHAIN_EXT_NOT_ENABLED_BUT_USED, // Did not enable WSI extension, but called WSI function
+ SWAPCHAIN_DEL_OBJECT_BEFORE_CHILDREN, // Called vkDestroyDevice() before vkDestroySwapchainKHR()
+ SWAPCHAIN_CREATE_UNSUPPORTED_SURFACE, // Called vkCreateSwapchainKHR() with a pCreateInfo->surface that wasn't seen as
+ // supported
+ // by vkGetPhysicalDeviceSurfaceSupportKHR for the device
+ SWAPCHAIN_CREATE_SWAP_WITHOUT_QUERY, // Called vkCreateSwapchainKHR() without calling a query (e.g.
+ // vkGetPhysicalDeviceSurfaceCapabilitiesKHR())
+ SWAPCHAIN_CREATE_SWAP_OUT_OF_BOUNDS_EXTENTS, // Called vkCreateSwapchainKHR() with out-of-bounds imageExtent
+ SWAPCHAIN_CREATE_SWAP_EXTENTS_NO_MATCH_WIN, // Called vkCreateSwapchainKHR() with imageExtent that doesn't match window's
+ // extent
+ SWAPCHAIN_CREATE_SWAP_BAD_PRE_TRANSFORM, // Called vkCreateSwapchainKHR() with a non-supported preTransform
+ SWAPCHAIN_CREATE_SWAP_BAD_COMPOSITE_ALPHA, // Called vkCreateSwapchainKHR() with a non-supported compositeAlpha
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_ARRAY_LAYERS, // Called vkCreateSwapchainKHR() with a non-supported imageArrayLayers
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_USAGE_FLAGS, // Called vkCreateSwapchainKHR() with a non-supported imageUsageFlags
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_COLOR_SPACE, // Called vkCreateSwapchainKHR() with a non-supported imageColorSpace
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_FORMAT, // Called vkCreateSwapchainKHR() with a non-supported imageFormat
+ SWAPCHAIN_CREATE_SWAP_BAD_IMG_FMT_CLR_SP, // Called vkCreateSwapchainKHR() with a non-supported imageColorSpace
+ SWAPCHAIN_CREATE_SWAP_BAD_PRESENT_MODE, // Called vkCreateSwapchainKHR() with a non-supported presentMode
+ SWAPCHAIN_CREATE_SWAP_BAD_SHARING_MODE, // Called vkCreateSwapchainKHR() with a non-supported imageSharingMode
+ SWAPCHAIN_CREATE_SWAP_BAD_SHARING_VALUES, // Called vkCreateSwapchainKHR() with bad values when imageSharingMode is
+ // VK_SHARING_MODE_CONCURRENT
+ SWAPCHAIN_BAD_BOOL, // VkBool32 that doesn't have value of VK_TRUE or VK_FALSE (e.g. is a non-zero form of true)
+ SWAPCHAIN_PRIOR_COUNT, // Query must be called first to get value of pCount, then called second time
+ SWAPCHAIN_INVALID_COUNT, // Second time a query called, the pCount value didn't match first time
+ SWAPCHAIN_WRONG_STYPE, // The sType for a struct has the wrong value
+ SWAPCHAIN_WRONG_NEXT, // The pNext for a struct is not NULL
+ SWAPCHAIN_ZERO_VALUE, // A value should be non-zero
+ SWAPCHAIN_DID_NOT_QUERY_QUEUE_FAMILIES, // A function using a queueFamilyIndex was called before
+ // vkGetPhysicalDeviceQueueFamilyProperties() was called
+ SWAPCHAIN_QUEUE_FAMILY_INDEX_TOO_LARGE, // A queueFamilyIndex value is not less than pQueueFamilyPropertyCount returned by
+ // vkGetPhysicalDeviceQueueFamilyProperties()
+ SWAPCHAIN_SURFACE_NOT_SUPPORTED_WITH_QUEUE, // A surface is not supported by a given queueFamilyIndex, as seen by
+ // vkGetPhysicalDeviceSurfaceSupportKHR()
+ SWAPCHAIN_GET_SUPPORTED_DISPLAYS_WITHOUT_QUERY, // vkGetDisplayPlaneSupportedDisplaysKHR should be called after querying
+ // device display plane properties
+ SWAPCHAIN_PLANE_INDEX_TOO_LARGE, // a planeIndex value is larger than what vkGetDisplayPlaneSupportedDisplaysKHR returns
};
// The following is for logging error messages:
@@ -230,8 +232,12 @@
std::unordered_map<void *, SwpQueue> queueMap;
layer_data()
- : report_data(nullptr), device_dispatch_table(nullptr), instance_dispatch_table(nullptr), num_tmp_callbacks(0),
- tmp_dbg_create_infos(nullptr), tmp_callbacks(nullptr){};
+ : report_data(nullptr),
+ device_dispatch_table(nullptr),
+ instance_dispatch_table(nullptr),
+ num_tmp_callbacks(0),
+ tmp_dbg_create_infos(nullptr),
+ tmp_callbacks(nullptr){};
};
-#endif // SWAPCHAIN_H
+#endif // SWAPCHAIN_H
diff --git a/layers/threading.cpp b/layers/threading.cpp
index 1997f78..0c6e3f1 100644
--- a/layers/threading.cpp
+++ b/layers/threading.cpp
@@ -42,7 +42,6 @@
static uint32_t loader_layer_if_version = CURRENT_LOADER_LAYER_INTERFACE_VERSION;
static void initThreading(layer_data *my_data, const VkAllocationCallbacks *pAllocator) {
-
layer_debug_actions(my_data->report_data, my_data->logging_callback, pAllocator, "google_threading");
}
@@ -61,8 +60,7 @@
chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
VkResult result = fpCreateInstance(pCreateInfo, pAllocator, pInstance);
- if (result != VK_SUCCESS)
- return result;
+ if (result != VK_SUCCESS) return result;
layer_data *my_data = get_my_data_ptr(get_dispatch_key(*pInstance), layer_data_map);
my_data->instance = *pInstance;
@@ -178,14 +176,13 @@
static const VkLayerProperties layerProps = {
"VK_LAYER_GOOGLE_threading",
- VK_LAYER_API_VERSION, // specVersion
+ VK_LAYER_API_VERSION, // specVersion
1, "Google Validation Layer",
};
static inline PFN_vkVoidFunction layer_intercept_proc(const char *name) {
for (unsigned int i = 0; i < sizeof(procmap) / sizeof(procmap[0]); i++) {
- if (!strcmp(name, procmap[i].name))
- return procmap[i].pFunc;
+ if (!strcmp(name, procmap[i].name)) return procmap[i].pFunc;
}
return NULL;
}
@@ -224,28 +221,18 @@
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL GetPhysicalDeviceProcAddr(VkInstance instance, const char *funcName);
static inline PFN_vkVoidFunction layer_intercept_instance_proc(const char *name) {
- if (!name || name[0] != 'v' || name[1] != 'k')
- return NULL;
+ if (!name || name[0] != 'v' || name[1] != 'k') return NULL;
name += 2;
- if (!strcmp(name, "CreateInstance"))
- return (PFN_vkVoidFunction)CreateInstance;
- if (!strcmp(name, "DestroyInstance"))
- return (PFN_vkVoidFunction)DestroyInstance;
- if (!strcmp(name, "EnumerateInstanceLayerProperties"))
- return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
- if (!strcmp(name, "EnumerateInstanceExtensionProperties"))
- return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
- if (!strcmp(name, "EnumerateDeviceLayerProperties"))
- return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
- if (!strcmp(name, "EnumerateDeviceExtensionProperties"))
- return (PFN_vkVoidFunction)EnumerateDeviceExtensionProperties;
- if (!strcmp(name, "CreateDevice"))
- return (PFN_vkVoidFunction)CreateDevice;
- if (!strcmp(name, "GetInstanceProcAddr"))
- return (PFN_vkVoidFunction)GetInstanceProcAddr;
- if (!strcmp(name, "GetPhysicalDeviceProcAddr"))
- return (PFN_vkVoidFunction)GetPhysicalDeviceProcAddr;
+ if (!strcmp(name, "CreateInstance")) return (PFN_vkVoidFunction)CreateInstance;
+ if (!strcmp(name, "DestroyInstance")) return (PFN_vkVoidFunction)DestroyInstance;
+ if (!strcmp(name, "EnumerateInstanceLayerProperties")) return (PFN_vkVoidFunction)EnumerateInstanceLayerProperties;
+ if (!strcmp(name, "EnumerateInstanceExtensionProperties")) return (PFN_vkVoidFunction)EnumerateInstanceExtensionProperties;
+ if (!strcmp(name, "EnumerateDeviceLayerProperties")) return (PFN_vkVoidFunction)EnumerateDeviceLayerProperties;
+ if (!strcmp(name, "EnumerateDeviceExtensionProperties")) return (PFN_vkVoidFunction)EnumerateDeviceExtensionProperties;
+ if (!strcmp(name, "CreateDevice")) return (PFN_vkVoidFunction)CreateDevice;
+ if (!strcmp(name, "GetInstanceProcAddr")) return (PFN_vkVoidFunction)GetInstanceProcAddr;
+ if (!strcmp(name, "GetPhysicalDeviceProcAddr")) return (PFN_vkVoidFunction)GetPhysicalDeviceProcAddr;
return NULL;
}
@@ -257,14 +244,12 @@
assert(device);
addr = layer_intercept_proc(funcName);
- if (addr)
- return addr;
+ if (addr) return addr;
dev_data = get_my_data_ptr(get_dispatch_key(device), layer_data_map);
VkLayerDispatchTable *pTable = dev_data->device_dispatch_table;
- if (pTable->GetDeviceProcAddr == NULL)
- return NULL;
+ if (pTable->GetDeviceProcAddr == NULL) return NULL;
return pTable->GetDeviceProcAddr(device, funcName);
}
@@ -273,8 +258,7 @@
layer_data *my_data;
addr = layer_intercept_instance_proc(funcName);
- if (!addr)
- addr = layer_intercept_proc(funcName);
+ if (!addr) addr = layer_intercept_proc(funcName);
if (addr) {
return addr;
}
@@ -301,8 +285,7 @@
my_data = get_my_data_ptr(get_dispatch_key(instance), layer_data_map);
VkLayerInstanceDispatchTable *pTable = my_data->instance_dispatch_table;
- if (pTable->GetPhysicalDeviceProcAddr == NULL)
- return NULL;
+ if (pTable->GetPhysicalDeviceProcAddr == NULL) return NULL;
return pTable->GetPhysicalDeviceProcAddr(instance, funcName);
}
@@ -382,7 +365,7 @@
dispatch_key key = get_dispatch_key(device);
layer_data *my_data = get_my_data_ptr(key, layer_data_map);
VkLayerDispatchTable *pTable = my_data->device_dispatch_table;
- const bool lockCommandPool = false; // pool is already directly locked
+ const bool lockCommandPool = false; // pool is already directly locked
bool threadChecks = startMultiThread();
if (threadChecks) {
startReadObject(my_data, device);
@@ -408,7 +391,7 @@
}
}
-} // namespace threading
+} // namespace threading
// vk_layer_logging.h expects these to be defined
diff --git a/layers/threading.h b/layers/threading.h
index 9bd8424..481acd4 100644
--- a/layers/threading.h
+++ b/layers/threading.h
@@ -26,7 +26,7 @@
#include "vk_layer_config.h"
#include "vk_layer_logging.h"
-#if defined(__LP64__) || defined(_WIN64) || defined(__x86_64__) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || \
+#if defined(__LP64__) || defined(_WIN64) || defined(__x86_64__) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || \
defined(__aarch64__) || defined(__powerpc64__)
// If pointers are 64-bit, then there can be separate counters for each
// NONDISPATCHABLE_HANDLE type. Otherwise they are all typedef uint64_t.
@@ -35,9 +35,9 @@
// Draw State ERROR codes
enum THREADING_CHECKER_ERROR {
- THREADING_CHECKER_NONE, // Used for INFO & other non-error messages
- THREADING_CHECKER_MULTIPLE_THREADS, // Object used simultaneously by multiple threads
- THREADING_CHECKER_SINGLE_THREAD_REUSE, // Object used simultaneously by recursion in single thread
+ THREADING_CHECKER_NONE, // Used for INFO & other non-error messages
+ THREADING_CHECKER_MULTIPLE_THREADS, // Object used simultaneously by multiple threads
+ THREADING_CHECKER_SINGLE_THREAD_REUSE, // Object used simultaneously by recursion in single thread
};
struct object_use_data {
@@ -66,10 +66,11 @@
// finishing check if an application is using vulkan from multiple threads.
inline void finishMultiThread() { vulkan_in_use = false; }
-} // namespace threading
+} // namespace threading
-template <typename T> class counter {
- public:
+template <typename T>
+class counter {
+ public:
const char *typeName;
VkDebugReportObjectTypeEXT objectType;
std::unordered_map<T, object_use_data> uses;
@@ -247,12 +248,15 @@
counter<VkDebugReportCallbackEXT> c_VkDebugReportCallbackEXT;
counter<VkObjectTableNVX> c_VkObjectTableNVX;
counter<VkIndirectCommandsLayoutNVX> c_VkIndirectCommandsLayoutNVX;
-#else // DISTINCT_NONDISPATCHABLE_HANDLES
+#else // DISTINCT_NONDISPATCHABLE_HANDLES
counter<uint64_t> c_uint64_t;
-#endif // DISTINCT_NONDISPATCHABLE_HANDLES
+#endif // DISTINCT_NONDISPATCHABLE_HANDLES
layer_data()
- : report_data(nullptr), num_tmp_callbacks(0), tmp_dbg_create_infos(nullptr), tmp_callbacks(nullptr),
+ : report_data(nullptr),
+ num_tmp_callbacks(0),
+ tmp_dbg_create_infos(nullptr),
+ tmp_callbacks(nullptr),
c_VkCommandBuffer("VkCommandBuffer", VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT),
c_VkDevice("VkDevice", VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT),
c_VkInstance("VkInstance", VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT),
@@ -265,7 +269,8 @@
c_VkDescriptorSet("VkDescriptorSet", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT),
c_VkDescriptorSetLayout("VkDescriptorSetLayout", VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT),
c_VkDeviceMemory("VkDeviceMemory", VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT),
- c_VkEvent("VkEvent", VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT), c_VkFence("VkFence", VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT),
+ c_VkEvent("VkEvent", VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT),
+ c_VkFence("VkFence", VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT),
c_VkFramebuffer("VkFramebuffer", VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT),
c_VkImage("VkImage", VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT),
c_VkImageView("VkImageView", VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT),
@@ -280,20 +285,20 @@
c_VkDebugReportCallbackEXT("VkDebugReportCallbackEXT", VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT),
c_VkObjectTableNVX("VkObjectTableNVX", VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT),
c_VkIndirectCommandsLayoutNVX("VkIndirectCommandsLayoutNVX", VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT)
-#else // DISTINCT_NONDISPATCHABLE_HANDLES
+#else // DISTINCT_NONDISPATCHABLE_HANDLES
c_uint64_t("NON_DISPATCHABLE_HANDLE", VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT)
-#endif // DISTINCT_NONDISPATCHABLE_HANDLES
+#endif // DISTINCT_NONDISPATCHABLE_HANDLES
{};
};
-#define WRAPPER(type) \
- static void startWriteObject(struct layer_data *my_data, type object) { \
- my_data->c_##type.startWrite(my_data->report_data, object); \
- } \
- static void finishWriteObject(struct layer_data *my_data, type object) { my_data->c_##type.finishWrite(object); } \
- static void startReadObject(struct layer_data *my_data, type object) { \
- my_data->c_##type.startRead(my_data->report_data, object); \
- } \
+#define WRAPPER(type) \
+ static void startWriteObject(struct layer_data *my_data, type object) { \
+ my_data->c_##type.startWrite(my_data->report_data, object); \
+ } \
+ static void finishWriteObject(struct layer_data *my_data, type object) { my_data->c_##type.finishWrite(object); } \
+ static void startReadObject(struct layer_data *my_data, type object) { \
+ my_data->c_##type.startRead(my_data->report_data, object); \
+ } \
static void finishReadObject(struct layer_data *my_data, type object) { my_data->c_##type.finishRead(object); }
WRAPPER(VkDevice)
@@ -323,9 +328,9 @@
WRAPPER(VkDebugReportCallbackEXT)
WRAPPER(VkObjectTableNVX)
WRAPPER(VkIndirectCommandsLayoutNVX)
-#else // DISTINCT_NONDISPATCHABLE_HANDLES
+#else // DISTINCT_NONDISPATCHABLE_HANDLES
WRAPPER(uint64_t)
-#endif // DISTINCT_NONDISPATCHABLE_HANDLES
+#endif // DISTINCT_NONDISPATCHABLE_HANDLES
static std::unordered_map<void *, layer_data *> layer_data_map;
static std::mutex command_pool_lock;
@@ -364,4 +369,4 @@
lock.unlock();
finishReadObject(my_data, pool);
}
-#endif // THREADING_H
+#endif // THREADING_H
diff --git a/layers/unique_objects.cpp b/layers/unique_objects.cpp
index 08f1ded..f973ec5 100644
--- a/layers/unique_objects.cpp
+++ b/layers/unique_objects.cpp
@@ -260,8 +260,8 @@
}
static const VkLayerProperties globalLayerProps = {"VK_LAYER_GOOGLE_unique_objects",
- VK_LAYER_API_VERSION, // specVersion
- 1, // implementationVersion
+ VK_LAYER_API_VERSION, // specVersion
+ 1, // implementationVersion
"Google Validation Layer"};
/// Declare prototype for these functions
@@ -269,8 +269,7 @@
static inline PFN_vkVoidFunction layer_intercept_proc(const char *name) {
for (unsigned int i = 0; i < sizeof(procmap) / sizeof(procmap[0]); i++) {
- if (!strcmp(name, procmap[i].name))
- return procmap[i].pFunc;
+ if (!strcmp(name, procmap[i].name)) return procmap[i].pFunc;
}
if (0 == strcmp(name, "vk_layerGetPhysicalDeviceProcAddr")) {
return (PFN_vkVoidFunction)GetPhysicalDeviceProcAddr;
@@ -717,7 +716,7 @@
}
#endif
-} // namespace unique_objects
+} // namespace unique_objects
// vk_layer_logging.h expects these to be defined
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(VkInstance instance,
diff --git a/layers/unique_objects.h b/layers/unique_objects.h
index 717f679..85d35fe 100644
--- a/layers/unique_objects.h
+++ b/layers/unique_objects.h
@@ -108,7 +108,7 @@
VkDebugReportCallbackEXT *tmp_callbacks;
bool wsi_enabled;
- std::unordered_map<uint64_t, uint64_t> unique_id_mapping; // Map uniqueID to actual object handle
+ std::unordered_map<uint64_t, uint64_t> unique_id_mapping; // Map uniqueID to actual object handle
VkPhysicalDevice gpu;
layer_data() : wsi_enabled(false), gpu(VK_NULL_HANDLE){};
@@ -128,14 +128,15 @@
static std::unordered_map<void *, struct instance_extension_enables> instance_ext_map;
static std::unordered_map<void *, layer_data *> layer_data_map;
-static std::mutex global_lock; // Protect map accesses and unique_id increments
+static std::mutex global_lock; // Protect map accesses and unique_id increments
struct GenericHeader {
VkStructureType sType;
void *pNext;
};
-template <typename T> bool ContainsExtStruct(const T *target, VkStructureType ext_type) {
+template <typename T>
+bool ContainsExtStruct(const T *target, VkStructureType ext_type) {
assert(target != nullptr);
const GenericHeader *ext_struct = reinterpret_cast<const GenericHeader *>(target->pNext);
@@ -151,4 +152,4 @@
return false;
}
-} // namespace unique_objects
+} // namespace unique_objects
diff --git a/layers/vk_layer_config.cpp b/layers/vk_layer_config.cpp
index 49cbb9d..ffefbcd 100644
--- a/layers/vk_layer_config.cpp
+++ b/layers/vk_layer_config.cpp
@@ -34,14 +34,14 @@
#define MAX_CHARS_PER_LINE 4096
class ConfigFile {
- public:
+ public:
ConfigFile();
~ConfigFile();
const char *getOption(const std::string &_option);
void setOption(const std::string &_option, const std::string &_val);
- private:
+ private:
bool m_fileIsParsed;
std::map<std::string, std::string> m_valueMap;
@@ -98,7 +98,6 @@
std::string option_list = g_configFileObj.getOption(_option.c_str());
while (option_list.length() != 0) {
-
// Find length of option string
std::size_t option_length = option_list.find(",");
if (option_length == option_list.npos) {
@@ -158,7 +157,7 @@
"VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG,VK_DBG_LAYER_ACTION_DEBUG_OUTPUT";
m_valueMap["google_unique_objects.debug_action"] =
"VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG,VK_DBG_LAYER_ACTION_DEBUG_OUTPUT";
-#else // WIN32
+#else // WIN32
m_valueMap["lunarg_core_validation.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["lunarg_image.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["lunarg_object_tracker.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
@@ -166,7 +165,7 @@
m_valueMap["lunarg_swapchain.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["google_threading.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
m_valueMap["google_unique_objects.debug_action"] = "VK_DBG_LAYER_ACTION_DEFAULT,VK_DBG_LAYER_ACTION_LOG_MSG";
-#endif // WIN32
+#endif // WIN32
m_valueMap["lunarg_core_validation.log_filename"] = "stdout";
m_valueMap["lunarg_image.log_filename"] = "stdout";
@@ -244,8 +243,7 @@
// discard any comments delimited by '#' in the line
pComment = strchr(buf, '#');
- if (pComment)
- *pComment = '\0';
+ if (pComment) *pComment = '\0';
if (sscanf(buf, " %511[^\n\t =] = %511[^\n \t]", option, value) == 2) {
std::string optStr(option);
@@ -265,26 +263,22 @@
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) {
- if (separator)
- strcat(msg_flags, ",");
+ if (separator) strcat(msg_flags, ",");
strcat(msg_flags, "INFO");
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
- if (separator)
- strcat(msg_flags, ",");
+ if (separator) strcat(msg_flags, ",");
strcat(msg_flags, "WARN");
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) {
- if (separator)
- strcat(msg_flags, ",");
+ if (separator) strcat(msg_flags, ",");
strcat(msg_flags, "PERF");
separator = true;
}
if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
- if (separator)
- strcat(msg_flags, ",");
+ if (separator) strcat(msg_flags, ",");
strcat(msg_flags, "ERROR");
}
}
diff --git a/layers/vk_layer_data.h b/layers/vk_layer_data.h
index b7e71f2..eb2b8b4 100644
--- a/layers/vk_layer_data.h
+++ b/layers/vk_layer_data.h
@@ -23,7 +23,8 @@
#include <unordered_map>
#include "vk_layer_table.h"
-template <typename DATA_T> DATA_T *get_my_data_ptr(void *data_key, std::unordered_map<void *, DATA_T *> &layer_data_map) {
+template <typename DATA_T>
+DATA_T *get_my_data_ptr(void *data_key, std::unordered_map<void *, DATA_T *> &layer_data_map) {
DATA_T *debug_data;
typename std::unordered_map<void *, DATA_T *>::const_iterator got;
@@ -40,4 +41,4 @@
return debug_data;
}
-#endif // LAYER_DATA_H
+#endif // LAYER_DATA_H
diff --git a/layers/vk_layer_extension_utils.h b/layers/vk_layer_extension_utils.h
index b593768..4a51c16 100644
--- a/layers/vk_layer_extension_utils.h
+++ b/layers/vk_layer_extension_utils.h
@@ -36,5 +36,5 @@
VK_LAYER_EXPORT VkResult util_GetLayerProperties(const uint32_t count, const VkLayerProperties *layer_properties, uint32_t *pCount,
VkLayerProperties *pProperties);
-} // extern "C"
-#endif // LAYER_EXTENSION_UTILS_H
+} // extern "C"
+#endif // LAYER_EXTENSION_UTILS_H
diff --git a/layers/vk_layer_logging.h b/layers/vk_layer_logging.h
index 542ee26..b033d1f 100644
--- a/layers/vk_layer_logging.h
+++ b/layers/vk_layer_logging.h
@@ -53,7 +53,6 @@
// Add a debug message callback node structure to the specified callback linked list
static inline void AddDebugMessageCallback(debug_report_data *debug_data, VkLayerDbgFunctionNode **list_head,
VkLayerDbgFunctionNode *new_node) {
-
new_node->pNext = *list_head;
*list_head = new_node;
}
@@ -130,13 +129,12 @@
return bail;
}
-static inline debug_report_data *
-debug_report_create_instance(VkLayerInstanceDispatchTable *table, VkInstance inst, uint32_t extension_count,
- const char *const *ppEnabledExtensions) // layer or extension name to be enabled
+static inline debug_report_data *debug_report_create_instance(
+ VkLayerInstanceDispatchTable *table, VkInstance inst, uint32_t extension_count,
+ const char *const *ppEnabledExtensions) // layer or extension name to be enabled
{
debug_report_data *debug_data = (debug_report_data *)malloc(sizeof(debug_report_data));
- if (!debug_data)
- return NULL;
+ if (!debug_data) return NULL;
memset(debug_data, 0, sizeof(debug_report_data));
for (uint32_t i = 0; i < extension_count; i++) {
@@ -176,12 +174,10 @@
const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT *pCallback) {
VkLayerDbgFunctionNode *pNewDbgFuncNode = (VkLayerDbgFunctionNode *)malloc(sizeof(VkLayerDbgFunctionNode));
- if (!pNewDbgFuncNode)
- return VK_ERROR_OUT_OF_HOST_MEMORY;
+ if (!pNewDbgFuncNode) return VK_ERROR_OUT_OF_HOST_MEMORY;
// Handle of 0 is logging_callback so use allocated Node address as unique handle
- if (!(*pCallback))
- *pCallback = (VkDebugReportCallbackEXT)pNewDbgFuncNode;
+ if (!(*pCallback)) *pCallback = (VkDebugReportCallbackEXT)pNewDbgFuncNode;
pNewDbgFuncNode->msgCallback = *pCallback;
pNewDbgFuncNode->pfnMsgCallback = pCreateInfo->pfnCallback;
pNewDbgFuncNode->msgFlags = pCreateInfo->flags;
@@ -387,4 +383,4 @@
return false;
}
-#endif // LAYER_LOGGING_H
+#endif // LAYER_LOGGING_H
diff --git a/layers/vk_layer_utils.cpp b/layers/vk_layer_utils.cpp
index 00ea4cf..e83e4f9 100644
--- a/layers/vk_layer_utils.cpp
+++ b/layers/vk_layer_utils.cpp
@@ -245,13 +245,13 @@
bool is_ds = false;
switch (format) {
- case VK_FORMAT_D16_UNORM_S8_UINT:
- case VK_FORMAT_D24_UNORM_S8_UINT:
- case VK_FORMAT_D32_SFLOAT_S8_UINT:
- is_ds = true;
- break;
- default:
- break;
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ is_ds = true;
+ break;
+ default:
+ break;
}
return is_ds;
}
@@ -264,13 +264,13 @@
bool is_depth = false;
switch (format) {
- case VK_FORMAT_D16_UNORM:
- case VK_FORMAT_X8_D24_UNORM_PACK32:
- case VK_FORMAT_D32_SFLOAT:
- is_depth = true;
- break;
- default:
- break;
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D32_SFLOAT:
+ is_depth = true;
+ break;
+ default:
+ break;
}
return is_depth;
@@ -281,71 +281,71 @@
bool is_norm = false;
switch (format) {
- case VK_FORMAT_R4G4_UNORM_PACK8:
- case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
- case VK_FORMAT_R5G6B5_UNORM_PACK16:
- case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
- case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
- case VK_FORMAT_R8_UNORM:
- case VK_FORMAT_R8_SNORM:
- case VK_FORMAT_R8G8_UNORM:
- case VK_FORMAT_R8G8_SNORM:
- case VK_FORMAT_R8G8B8_UNORM:
- case VK_FORMAT_R8G8B8_SNORM:
- case VK_FORMAT_R8G8B8A8_UNORM:
- case VK_FORMAT_R8G8B8A8_SNORM:
- case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
- case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
- case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
- case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
- case VK_FORMAT_R16_UNORM:
- case VK_FORMAT_R16_SNORM:
- case VK_FORMAT_R16G16_UNORM:
- case VK_FORMAT_R16G16_SNORM:
- case VK_FORMAT_R16G16B16_UNORM:
- case VK_FORMAT_R16G16B16_SNORM:
- case VK_FORMAT_R16G16B16A16_UNORM:
- case VK_FORMAT_R16G16B16A16_SNORM:
- case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
- case VK_FORMAT_BC2_UNORM_BLOCK:
- case VK_FORMAT_BC3_UNORM_BLOCK:
- case VK_FORMAT_BC4_UNORM_BLOCK:
- case VK_FORMAT_BC4_SNORM_BLOCK:
- case VK_FORMAT_BC5_UNORM_BLOCK:
- case VK_FORMAT_BC5_SNORM_BLOCK:
- case VK_FORMAT_BC7_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11_SNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
- case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
- case VK_FORMAT_B5G6R5_UNORM_PACK16:
- case VK_FORMAT_B8G8R8_UNORM:
- case VK_FORMAT_B8G8R8_SNORM:
- case VK_FORMAT_B8G8R8A8_UNORM:
- case VK_FORMAT_B8G8R8A8_SNORM:
- case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
- case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
- is_norm = true;
- break;
- default:
- break;
+ case VK_FORMAT_R4G4_UNORM_PACK8:
+ case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+ case VK_FORMAT_R5G6B5_UNORM_PACK16:
+ case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+ case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+ case VK_FORMAT_R8_UNORM:
+ case VK_FORMAT_R8_SNORM:
+ case VK_FORMAT_R8G8_UNORM:
+ case VK_FORMAT_R8G8_SNORM:
+ case VK_FORMAT_R8G8B8_UNORM:
+ case VK_FORMAT_R8G8B8_SNORM:
+ case VK_FORMAT_R8G8B8A8_UNORM:
+ case VK_FORMAT_R8G8B8A8_SNORM:
+ case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+ case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+ case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+ case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+ case VK_FORMAT_R16_UNORM:
+ case VK_FORMAT_R16_SNORM:
+ case VK_FORMAT_R16G16_UNORM:
+ case VK_FORMAT_R16G16_SNORM:
+ case VK_FORMAT_R16G16B16_UNORM:
+ case VK_FORMAT_R16G16B16_SNORM:
+ case VK_FORMAT_R16G16B16A16_UNORM:
+ case VK_FORMAT_R16G16B16A16_SNORM:
+ case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+ case VK_FORMAT_BC2_UNORM_BLOCK:
+ case VK_FORMAT_BC3_UNORM_BLOCK:
+ case VK_FORMAT_BC4_UNORM_BLOCK:
+ case VK_FORMAT_BC4_SNORM_BLOCK:
+ case VK_FORMAT_BC5_UNORM_BLOCK:
+ case VK_FORMAT_BC5_SNORM_BLOCK:
+ case VK_FORMAT_BC7_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ case VK_FORMAT_B5G6R5_UNORM_PACK16:
+ case VK_FORMAT_B8G8R8_UNORM:
+ case VK_FORMAT_B8G8R8_SNORM:
+ case VK_FORMAT_B8G8R8A8_UNORM:
+ case VK_FORMAT_B8G8R8A8_SNORM:
+ case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+ case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+ is_norm = true;
+ break;
+ default:
+ break;
}
return is_norm;
@@ -359,31 +359,31 @@
bool is_uint = false;
switch (format) {
- case VK_FORMAT_R8_UINT:
- case VK_FORMAT_R8G8_UINT:
- case VK_FORMAT_R8G8B8_UINT:
- case VK_FORMAT_R8G8B8A8_UINT:
- case VK_FORMAT_A8B8G8R8_UINT_PACK32:
- case VK_FORMAT_A2B10G10R10_UINT_PACK32:
- case VK_FORMAT_R16_UINT:
- case VK_FORMAT_R16G16_UINT:
- case VK_FORMAT_R16G16B16_UINT:
- case VK_FORMAT_R16G16B16A16_UINT:
- case VK_FORMAT_R32_UINT:
- case VK_FORMAT_R32G32_UINT:
- case VK_FORMAT_R32G32B32_UINT:
- case VK_FORMAT_R32G32B32A32_UINT:
- case VK_FORMAT_R64_UINT:
- case VK_FORMAT_R64G64_UINT:
- case VK_FORMAT_R64G64B64_UINT:
- case VK_FORMAT_R64G64B64A64_UINT:
- case VK_FORMAT_B8G8R8_UINT:
- case VK_FORMAT_B8G8R8A8_UINT:
- case VK_FORMAT_A2R10G10B10_UINT_PACK32:
- is_uint = true;
- break;
- default:
- break;
+ case VK_FORMAT_R8_UINT:
+ case VK_FORMAT_R8G8_UINT:
+ case VK_FORMAT_R8G8B8_UINT:
+ case VK_FORMAT_R8G8B8A8_UINT:
+ case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+ case VK_FORMAT_R16_UINT:
+ case VK_FORMAT_R16G16_UINT:
+ case VK_FORMAT_R16G16B16_UINT:
+ case VK_FORMAT_R16G16B16A16_UINT:
+ case VK_FORMAT_R32_UINT:
+ case VK_FORMAT_R32G32_UINT:
+ case VK_FORMAT_R32G32B32_UINT:
+ case VK_FORMAT_R32G32B32A32_UINT:
+ case VK_FORMAT_R64_UINT:
+ case VK_FORMAT_R64G64_UINT:
+ case VK_FORMAT_R64G64B64_UINT:
+ case VK_FORMAT_R64G64B64A64_UINT:
+ case VK_FORMAT_B8G8R8_UINT:
+ case VK_FORMAT_B8G8R8A8_UINT:
+ case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+ is_uint = true;
+ break;
+ default:
+ break;
}
return is_uint;
@@ -394,31 +394,31 @@
bool is_sint = false;
switch (format) {
- case VK_FORMAT_R8_SINT:
- case VK_FORMAT_R8G8_SINT:
- case VK_FORMAT_R8G8B8_SINT:
- case VK_FORMAT_R8G8B8A8_SINT:
- case VK_FORMAT_A8B8G8R8_SINT_PACK32:
- case VK_FORMAT_A2B10G10R10_SINT_PACK32:
- case VK_FORMAT_R16_SINT:
- case VK_FORMAT_R16G16_SINT:
- case VK_FORMAT_R16G16B16_SINT:
- case VK_FORMAT_R16G16B16A16_SINT:
- case VK_FORMAT_R32_SINT:
- case VK_FORMAT_R32G32_SINT:
- case VK_FORMAT_R32G32B32_SINT:
- case VK_FORMAT_R32G32B32A32_SINT:
- case VK_FORMAT_R64_SINT:
- case VK_FORMAT_R64G64_SINT:
- case VK_FORMAT_R64G64B64_SINT:
- case VK_FORMAT_R64G64B64A64_SINT:
- case VK_FORMAT_B8G8R8_SINT:
- case VK_FORMAT_B8G8R8A8_SINT:
- case VK_FORMAT_A2R10G10B10_SINT_PACK32:
- is_sint = true;
- break;
- default:
- break;
+ case VK_FORMAT_R8_SINT:
+ case VK_FORMAT_R8G8_SINT:
+ case VK_FORMAT_R8G8B8_SINT:
+ case VK_FORMAT_R8G8B8A8_SINT:
+ case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+ case VK_FORMAT_R16_SINT:
+ case VK_FORMAT_R16G16_SINT:
+ case VK_FORMAT_R16G16B16_SINT:
+ case VK_FORMAT_R16G16B16A16_SINT:
+ case VK_FORMAT_R32_SINT:
+ case VK_FORMAT_R32G32_SINT:
+ case VK_FORMAT_R32G32B32_SINT:
+ case VK_FORMAT_R32G32B32A32_SINT:
+ case VK_FORMAT_R64_SINT:
+ case VK_FORMAT_R64G64_SINT:
+ case VK_FORMAT_R64G64B64_SINT:
+ case VK_FORMAT_R64G64B64A64_SINT:
+ case VK_FORMAT_B8G8R8_SINT:
+ case VK_FORMAT_B8G8R8A8_SINT:
+ case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+ is_sint = true;
+ break;
+ default:
+ break;
}
return is_sint;
@@ -429,26 +429,26 @@
bool is_float = false;
switch (format) {
- case VK_FORMAT_R16_SFLOAT:
- case VK_FORMAT_R16G16_SFLOAT:
- case VK_FORMAT_R16G16B16_SFLOAT:
- case VK_FORMAT_R16G16B16A16_SFLOAT:
- case VK_FORMAT_R32_SFLOAT:
- case VK_FORMAT_R32G32_SFLOAT:
- case VK_FORMAT_R32G32B32_SFLOAT:
- case VK_FORMAT_R32G32B32A32_SFLOAT:
- case VK_FORMAT_R64_SFLOAT:
- case VK_FORMAT_R64G64_SFLOAT:
- case VK_FORMAT_R64G64B64_SFLOAT:
- case VK_FORMAT_R64G64B64A64_SFLOAT:
- case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
- case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
- case VK_FORMAT_BC6H_UFLOAT_BLOCK:
- case VK_FORMAT_BC6H_SFLOAT_BLOCK:
- is_float = true;
- break;
- default:
- break;
+ case VK_FORMAT_R16_SFLOAT:
+ case VK_FORMAT_R16G16_SFLOAT:
+ case VK_FORMAT_R16G16B16_SFLOAT:
+ case VK_FORMAT_R16G16B16A16_SFLOAT:
+ case VK_FORMAT_R32_SFLOAT:
+ case VK_FORMAT_R32G32_SFLOAT:
+ case VK_FORMAT_R32G32B32_SFLOAT:
+ case VK_FORMAT_R32G32B32A32_SFLOAT:
+ case VK_FORMAT_R64_SFLOAT:
+ case VK_FORMAT_R64G64_SFLOAT:
+ case VK_FORMAT_R64G64B64_SFLOAT:
+ case VK_FORMAT_R64G64B64A64_SFLOAT:
+ case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+ case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+ case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+ case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+ is_float = true;
+ break;
+ default:
+ break;
}
return is_float;
@@ -459,38 +459,38 @@
bool is_srgb = false;
switch (format) {
- case VK_FORMAT_R8_SRGB:
- case VK_FORMAT_R8G8_SRGB:
- case VK_FORMAT_R8G8B8_SRGB:
- case VK_FORMAT_R8G8B8A8_SRGB:
- case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
- case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
- case VK_FORMAT_BC2_SRGB_BLOCK:
- case VK_FORMAT_BC3_SRGB_BLOCK:
- case VK_FORMAT_BC7_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
- case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
- case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
- case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
- case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
- case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
- case VK_FORMAT_B8G8R8_SRGB:
- case VK_FORMAT_B8G8R8A8_SRGB:
- is_srgb = true;
- break;
- default:
- break;
+ case VK_FORMAT_R8_SRGB:
+ case VK_FORMAT_R8G8_SRGB:
+ case VK_FORMAT_R8G8B8_SRGB:
+ case VK_FORMAT_R8G8B8A8_SRGB:
+ case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+ case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+ case VK_FORMAT_BC2_SRGB_BLOCK:
+ case VK_FORMAT_BC3_SRGB_BLOCK:
+ case VK_FORMAT_BC7_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+ case VK_FORMAT_B8G8R8_SRGB:
+ case VK_FORMAT_B8G8R8A8_SRGB:
+ is_srgb = true;
+ break;
+ default:
+ break;
}
return is_srgb;
@@ -499,63 +499,63 @@
// Return true if format is compressed
VK_LAYER_EXPORT bool vk_format_is_compressed(VkFormat format) {
switch (format) {
- case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
- case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
- case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
- case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
- case VK_FORMAT_BC2_UNORM_BLOCK:
- case VK_FORMAT_BC2_SRGB_BLOCK:
- case VK_FORMAT_BC3_UNORM_BLOCK:
- case VK_FORMAT_BC3_SRGB_BLOCK:
- case VK_FORMAT_BC4_UNORM_BLOCK:
- case VK_FORMAT_BC4_SNORM_BLOCK:
- case VK_FORMAT_BC5_UNORM_BLOCK:
- case VK_FORMAT_BC5_SNORM_BLOCK:
- case VK_FORMAT_BC6H_UFLOAT_BLOCK:
- case VK_FORMAT_BC6H_SFLOAT_BLOCK:
- case VK_FORMAT_BC7_UNORM_BLOCK:
- case VK_FORMAT_BC7_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
- case VK_FORMAT_EAC_R11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11_SNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
- case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
- case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
- case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
- case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
- case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
- case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
- return true;
- default:
- return false;
+ case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+ case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+ case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+ case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+ case VK_FORMAT_BC2_UNORM_BLOCK:
+ case VK_FORMAT_BC2_SRGB_BLOCK:
+ case VK_FORMAT_BC3_UNORM_BLOCK:
+ case VK_FORMAT_BC3_SRGB_BLOCK:
+ case VK_FORMAT_BC4_UNORM_BLOCK:
+ case VK_FORMAT_BC4_SNORM_BLOCK:
+ case VK_FORMAT_BC5_UNORM_BLOCK:
+ case VK_FORMAT_BC5_SNORM_BLOCK:
+ case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+ case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+ case VK_FORMAT_BC7_UNORM_BLOCK:
+ case VK_FORMAT_BC7_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+ return true;
+ default:
+ return false;
}
}
@@ -563,90 +563,90 @@
VK_LAYER_EXPORT VkExtent2D vk_format_compressed_block_size(VkFormat format) {
VkExtent2D block_size = {1, 1};
switch (format) {
- case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
- case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
- case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
- case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
- case VK_FORMAT_BC2_UNORM_BLOCK:
- case VK_FORMAT_BC2_SRGB_BLOCK:
- case VK_FORMAT_BC3_UNORM_BLOCK:
- case VK_FORMAT_BC3_SRGB_BLOCK:
- case VK_FORMAT_BC4_UNORM_BLOCK:
- case VK_FORMAT_BC4_SNORM_BLOCK:
- case VK_FORMAT_BC5_UNORM_BLOCK:
- case VK_FORMAT_BC5_SNORM_BLOCK:
- case VK_FORMAT_BC6H_UFLOAT_BLOCK:
- case VK_FORMAT_BC6H_SFLOAT_BLOCK:
- case VK_FORMAT_BC7_UNORM_BLOCK:
- case VK_FORMAT_BC7_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
- case VK_FORMAT_EAC_R11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11_SNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
- case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
- block_size = {4, 4};
- break;
- case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
- block_size = {5, 4};
- break;
- case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
- block_size = {5, 5};
- break;
- case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
- block_size = {6, 5};
- break;
- case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
- block_size = {6, 6};
- break;
- case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
- block_size = {8, 5};
- break;
- case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
- block_size = {8, 6};
- break;
- case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
- block_size = {8, 8};
- break;
- case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
- block_size = {10, 5};
- break;
- case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
- block_size = {10, 6};
- break;
- case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
- block_size = {10, 8};
- break;
- case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
- block_size = {10, 10};
- break;
- case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
- block_size = {12, 10};
- break;
- case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
- block_size = {12, 12};
- break;
- default:
- break;
+ case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+ case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+ case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+ case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+ case VK_FORMAT_BC2_UNORM_BLOCK:
+ case VK_FORMAT_BC2_SRGB_BLOCK:
+ case VK_FORMAT_BC3_UNORM_BLOCK:
+ case VK_FORMAT_BC3_SRGB_BLOCK:
+ case VK_FORMAT_BC4_UNORM_BLOCK:
+ case VK_FORMAT_BC4_SNORM_BLOCK:
+ case VK_FORMAT_BC5_UNORM_BLOCK:
+ case VK_FORMAT_BC5_SNORM_BLOCK:
+ case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+ case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+ case VK_FORMAT_BC7_UNORM_BLOCK:
+ case VK_FORMAT_BC7_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+ block_size = {4, 4};
+ break;
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ block_size = {5, 4};
+ break;
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ block_size = {5, 5};
+ break;
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ block_size = {6, 5};
+ break;
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ block_size = {6, 6};
+ break;
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ block_size = {8, 5};
+ break;
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ block_size = {8, 6};
+ break;
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ block_size = {8, 8};
+ break;
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+ block_size = {10, 5};
+ break;
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ block_size = {10, 6};
+ break;
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ block_size = {10, 8};
+ break;
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+ block_size = {10, 10};
+ break;
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+ block_size = {12, 10};
+ break;
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+ block_size = {12, 12};
+ break;
+ default:
+ break;
}
return block_size;
}
@@ -753,7 +753,6 @@
// callback will cause the default callbacks to be unregisterd and removed.
VK_LAYER_EXPORT void layer_debug_actions(debug_report_data *report_data, std::vector<VkDebugReportCallbackEXT> &logging_callback,
const VkAllocationCallbacks *pAllocator, const char *layer_identifier) {
-
VkDebugReportCallbackEXT callback = VK_NULL_HANDLE;
std::string report_flags_key = layer_identifier;
diff --git a/layers/vk_layer_utils.h b/layers/vk_layer_utils.h
index a70ae6b..943f846 100644
--- a/layers/vk_layer_utils.h
+++ b/layers/vk_layer_utils.h
@@ -24,9 +24,9 @@
#include "vk_layer_logging.h"
#ifndef WIN32
-#include <strings.h> // For ffs()
+#include <strings.h> // For ffs()
#else
-#include <intrin.h> // For __lzcnt()
+#include <intrin.h> // For __lzcnt()
#endif
#ifdef __cplusplus